def test_fetch_url_connectionerror(open_url_mock, fake_ansible_module):
    """Verify fetch_url maps exceptions from open_url onto fail_json with the exception text as msg."""
    # A ConnectionError raised inside open_url must surface via fail_json(msg=...)
    open_url_mock.side_effect = ConnectionError('TESTS')
    with pytest.raises(FailJson) as excinfo:
        fetch_url(fake_ansible_module, 'http://ansible.com/')
    assert excinfo.value.kwargs['msg'] == 'TESTS'
    # A generic ValueError is handled the same way
    open_url_mock.side_effect = ValueError('TESTS')
    with pytest.raises(FailJson) as excinfo:
        fetch_url(fake_ansible_module, 'http://ansible.com/')
    assert excinfo.value.kwargs['msg'] == 'TESTS'
def uri(module, url, dest, body, body_format, method, headers, socket_timeout):
    """Perform the HTTP request for the uri module.

    Returns a (result-dict, content, dest) tuple. When ``dest`` is a
    directory, probes for a redirect first so the filename can be derived
    from the final URL, and sends If-Modified-Since when the target file
    already exists.
    """
    # if dest is set and is a directory, let's check if we get redirected and
    # set the filename from that url
    redirected = False
    redir_info = {}
    r = {}
    if dest is not None:
        # Stash follow_redirects, in this block we don't want to follow
        # we'll reset back to the supplied value soon
        follow_redirects = module.params['follow_redirects']
        module.params['follow_redirects'] = False
        dest = os.path.expanduser(dest)
        if os.path.isdir(dest):
            # first check if we are redirected to a file download
            _, redir_info = fetch_url(module, url, data=body, headers=headers, method=method, timeout=socket_timeout)
            # if we are redirected, update the url with the location header,
            # and update dest with the new url filename
            if redir_info['status'] in (301, 302, 303, 307):
                url = redir_info['location']
                redirected = True
            dest = os.path.join(dest, url_filename(url))
        # if destination file already exist, only download if file newer
        if os.path.exists(dest):
            t = datetime.datetime.utcfromtimestamp(os.path.getmtime(dest))
            tstamp = t.strftime('%a, %d %b %Y %H:%M:%S +0000')
            headers['If-Modified-Since'] = tstamp
        # Reset follow_redirects back to the stashed value
        module.params['follow_redirects'] = follow_redirects
    resp, info = fetch_url(module, url, data=body, headers=headers, method=method, timeout=socket_timeout)
    try:
        content = resp.read()
    except AttributeError:
        # there was no content, but the error read()
        # may have been stored in the info as 'body'
        content = info.pop('body', '')
    r['redirected'] = redirected or info['url'] != url
    r.update(redir_info)
    r.update(info)
    return r, content, dest
def add(self):
    """Create the configured NITRO resource via a POST to the config endpoint.

    Marks the module result as changed when NITRO reports errorcode 0.
    """
    params = self._module.params
    # Both the resource type and its attributes are mandatory for a create
    if params['resource'] is None:
        self.fail_module(msg='NITRO resource is undefined.')
    if params['attributes'] is None:
        self.fail_module(msg='NITRO resource attributes are undefined.')
    url = '%s://%s/nitro/v1/config/%s' % (
        params['nitro_protocol'],
        params['nsip'],
        params['resource'],
    )
    payload = self._module.jsonify({params['resource']: params['attributes']})
    response, info = fetch_url(
        self._module,
        url=url,
        headers=self._headers,
        data=payload,
        method='POST',
    )
    result = {}
    # NITRO returns 201 Created on success
    self.edit_response_data(response, info, result, success_status=201)
    self._module_result['changed'] = (result['nitro_errorcode'] == 0)
    return result
def axapi_call_v3(module, url, method=None, body=None, signature=None):
    '''
    Returns a datastructure based on the result of the API call
    '''
    if signature:
        headers = {'content-type': 'application/json', 'Authorization': 'A10 %s' % signature}
    else:
        headers = {'content-type': 'application/json'}
    rsp, info = fetch_url(module, url, method=method, data=body, headers=headers)
    if not rsp or info['status'] >= 400:
        module.fail_json(msg="failed to connect (status code %s), error was %s" %
                         (info['status'], info.get('msg', 'no error given')))
    try:
        raw_data = rsp.read()
        data = json.loads(raw_data)
    except ValueError:
        # at least one API call (system.action.write_config) returns
        # XML even when JSON is requested, so do some minimal handling
        # here to prevent failing even when the call succeeded
        # NOTE(review): raw_data may be bytes on Python 3, in which case the
        # str containment test below would always be False — confirm callers.
        if 'status="ok"' in raw_data.lower():
            data = {"response": {"status": "OK"}}
        else:
            data = {"response": {"status": "fail", "err": {"msg": raw_data}}}
    except Exception:
        # FIX: was a bare ``except:`` which also swallowed SystemExit (raised
        # by module.fail_json above) and KeyboardInterrupt; narrowed.
        module.fail_json(msg="could not read the result from the host")
    finally:
        rsp.close()
    return data
def get(self):
    """Fetch a single NITRO config object identified by resource type and name."""
    params = self._module.params
    # Resource type and object name are both required for a lookup
    if params['resource'] is None:
        self.fail_module(msg='NITRO resource is undefined.')
    if params['name'] is None:
        self.fail_module(msg='NITRO resource name is undefined.')
    url = '%s://%s/nitro/v1/config/%s/%s' % (
        params['nitro_protocol'],
        params['nsip'],
        params['resource'],
        params['name'],
    )
    response, info = fetch_url(
        self._module,
        url=url,
        headers=self._headers,
        method='GET',
    )
    result = {}
    self.edit_response_data(response, info, result, success_status=200)
    self.handle_get_return_object(result)
    # A read never changes device state
    self._module_result['changed'] = False
    return result
def create(module, name, user, passwd, token, requester_id, service, hours, minutes, desc):
    """Create a PagerDuty maintenance window starting now and lasting hours+minutes.

    Returns (changed-ignored=False, parsed-response, created=True); fails the
    module on any non-201 response.
    """
    now = datetime.datetime.utcnow()
    later = now + datetime.timedelta(hours=int(hours), minutes=int(minutes))
    start = now.strftime("%Y-%m-%dT%H:%M:%SZ")
    end = later.strftime("%Y-%m-%dT%H:%M:%SZ")
    url = "https://" + name + ".pagerduty.com/api/v1/maintenance_windows"
    headers = {
        'Authorization': auth_header(user, passwd, token),
        'Content-Type': 'application/json',
    }
    request_data = {'maintenance_window': {'start_time': start, 'end_time': end, 'description': desc, 'service_ids': service}}
    if requester_id:
        request_data['requester_id'] = requester_id
    else:
        # Token-based auth has no implicit user, so a requester must be named
        if token:
            module.fail_json(msg="requester_id is required when using a token")
    data = json.dumps(request_data)
    response, info = fetch_url(module, url, data=data, headers=headers, method='POST')
    if info['status'] != 201:
        module.fail_json(msg="failed to create the window: %s" % info['msg'])
    try:
        json_out = json.loads(response.read())
    except Exception:
        # FIX: was a bare ``except:``; keep the deliberate best-effort
        # fallback but stop swallowing SystemExit/KeyboardInterrupt.
        json_out = ""
    return False, json_out, True
def absent(module, name, user, passwd, token, requester_id, service):
    """Delete the PagerDuty maintenance window whose id is service[0].

    Returns (False, parsed-response, True); fails the module on any
    non-204 response.
    """
    url = "https://" + name + ".pagerduty.com/api/v1/maintenance_windows/" + service[0]
    headers = {
        'Authorization': auth_header(user, passwd, token),
        'Content-Type': 'application/json',
    }
    request_data = {}
    if requester_id:
        request_data['requester_id'] = requester_id
    else:
        # Token-based auth has no implicit user, so a requester must be named
        if token:
            module.fail_json(msg="requester_id is required when using a token")
    data = json.dumps(request_data)
    response, info = fetch_url(module, url, data=data, headers=headers, method='DELETE')
    if info['status'] != 204:
        module.fail_json(msg="failed to delete the window: %s" % info['msg'])
    try:
        json_out = json.loads(response.read())
    except Exception:
        # FIX: was a bare ``except:``; a 204 normally has no body, so fall
        # back to "" — but don't swallow SystemExit/KeyboardInterrupt.
        json_out = ""
    return False, json_out, True
def download(module, deb):
    """Download the .deb at URL ``deb`` next to this module and return the local path.

    Fails the module on HTTP errors or any download exception.
    """
    tempdir = os.path.dirname(__file__)
    package = os.path.join(tempdir, str(deb.rsplit('/', 1)[1]))
    # When downloading a deb, how much of the deb to download before
    # saving to a tempfile (64k)
    BUFSIZE = 65536
    try:
        rsp, info = fetch_url(module, deb, method='GET')
        if info['status'] != 200:
            module.fail_json(msg="Failed to download %s, %s" % (deb, info['msg']))
        # FIX: use a context manager so the file handle is closed even if a
        # read/write below raises (previously it leaked on error paths).
        # Ensure file is open in binary mode for Python 3.
        with open(package, 'wb') as f:
            # Read a chunk at a time to save on ram
            while True:
                data = rsp.read(BUFSIZE)
                data = to_bytes(data, errors='surrogate_or_strict')
                if len(data) < 1:
                    break  # End of file, break while loop
                f.write(data)
        deb = package
    except Exception:
        e = get_exception()
        module.fail_json(msg="Failure downloading %s, %s" % (deb, e))
    return deb
def query(module, url, check, subscription):
    """Query the Sensu /silenced endpoint for entries matching check/subscription.

    Returns (False, parsed-response, False); fails the module on a 500.
    """
    headers = {
        'Content-Type': 'application/json',
    }
    url = url + '/silenced'
    request_data = {
        'check': check,
        'subscription': subscription,
    }
    # Remove keys with None value (iterate a copy since we delete in place)
    for k, v in dict(request_data).items():
        if v is None:
            del request_data[k]
    response, info = fetch_url(
        module, url, method='GET',
        headers=headers, data=json.dumps(request_data)
    )
    if info['status'] == 500:
        module.fail_json(
            msg="Failed to query silence %s. Reason: %s" % (subscription, info)
        )
    try:
        json_out = json.loads(response.read())
    except Exception:
        # FIX: was a bare ``except:``; keep the best-effort fallback but
        # stop swallowing SystemExit/KeyboardInterrupt.
        json_out = ""
    return False, json_out, False
def send_msg_v2(module, token, room, msg_from, msg, msg_format='text',
                color='yellow', notify=False, api=NOTIFY_URI_V2):
    '''sending message to hipchat v2 server'''
    headers = {'Authorization': 'Bearer %s' % token, 'Content-Type': 'application/json'}
    body = dict()
    body['message'] = msg
    body['color'] = color
    body['message_format'] = msg_format
    body['notify'] = notify
    # NOTE(review): appending NOTIFY_URI_V2 to ``api`` whose default is
    # already NOTIFY_URI_V2 looks like it would double the path — confirm
    # the intended default is the API base URL, not the notify URI.
    POST_URL = api + NOTIFY_URI_V2
    # urllib.pathname2url is the Python 2 location — presumably this module
    # targets py2; on py3 it lives in urllib.request. TODO confirm.
    url = POST_URL.replace('{id_or_name}', urllib.pathname2url(room))
    data = json.dumps(body)
    if module.check_mode:
        # In check mode, exit before actually sending the message
        module.exit_json(changed=False)
    response, info = fetch_url(module, url, data=data, headers=headers, method='POST')
    # https://www.hipchat.com/docs/apiv2/method/send_room_notification shows
    # 204 to be the expected result code.
    if info['status'] in [200, 204]:
        return response.read()
    else:
        module.fail_json(msg="failed to send message, return status=%s" % str(info['status']))
def send_request(self, commands, output='text'):
    """POST a JSON-RPC command batch to the device and return the parsed response.

    When enable mode is active, an 'enable' command is prepended and its
    result entry stripped from the response.
    """
    commands = to_list(commands)
    if self._enable:
        commands.insert(0, 'enable')
    body = self._request_builder(commands, output)
    data = self._module.jsonify(body)
    headers = {'Content-Type': 'application/json-rpc'}
    timeout = self._module.params['timeout']
    # NB: ``headers`` is rebound here to the fetch_url info dict (status,
    # msg, ...), shadowing the request headers above.
    response, headers = fetch_url(
        self._module, self._url,
        data=data,
        headers=headers,
        method='POST',
        timeout=timeout
    )
    if headers['status'] != 200:
        self._module.fail_json(**headers)
    try:
        data = response.read()
        response = self._module.from_json(to_text(data, errors='surrogate_then_replace'))
    except ValueError:
        # Raw body is included to aid debugging of non-JSON replies
        self._module.fail_json(msg='unable to load response from device', data=data)
    # Drop the result of the injected 'enable' command
    if self._enable and 'result' in response:
        response['result'].pop(0)
    return response
def send(self, commands, encoding='json'):
    """Send commands to the device.

    Returns the JSON-RPC 'result' list; when enable mode is active the
    enable command is prepended and its result entry stripped.
    """
    clist = to_list(commands)
    if self.enable is not None:
        clist.insert(0, self.enable)
    data = self._get_body(clist, encoding)
    data = self.module.jsonify(data)
    headers = {'Content-Type': 'application/json-rpc'}
    # NB: ``headers`` is rebound to the fetch_url info dict, shadowing the
    # request headers above.
    response, headers = fetch_url(self.module, self.url, data=data,
                                  headers=headers, method='POST')
    if headers['status'] != 200:
        self.module.fail_json(**headers)
    response = self.module.from_json(response.read())
    if 'error' in response:
        err = response['error']
        self.module.fail_json(msg='json-rpc error', commands=commands, **err)
    # Drop the result of the injected enable command
    if self.enable:
        response['result'].pop(0)
    return response['result']
def send_msg_v1(module, token, room, msg_from, msg, msg_format='text',
                color='yellow', notify=False, api=MSG_URI_V1):
    '''sending message to hipchat v1 server'''
    params = {}
    params['room_id'] = room
    params['from'] = msg_from[:15]  # max length is 15
    params['message'] = msg
    params['message_format'] = msg_format
    params['color'] = color
    params['api'] = api
    # v1 API expects 0/1, not true/false
    params['notify'] = int(notify)
    url = api + MSG_URI_V1 + "?auth_token=%s" % (token)
    # urllib.urlencode is the Python 2 location — presumably this module
    # targets py2; on py3 it is urllib.parse.urlencode. TODO confirm.
    data = urllib.urlencode(params)
    if module.check_mode:
        # In check mode, exit before actually sending the message
        module.exit_json(changed=False)
    response, info = fetch_url(module, url, data=data)
    if info['status'] == 200:
        return response.read()
    else:
        module.fail_json(msg="failed to send message, return status=%s" % str(info['status']))
def request(url, user, passwd, timeout, data=None, method=None):
    """Issue an authenticated JSON request to JIRA and return the parsed body (or {}).

    Relies on a module-level global ``module`` (AnsibleModule) being in scope.
    """
    if data:
        data = json.dumps(data)
    # NOTE: fetch_url uses a password manager, which follows the
    # standard request-then-challenge basic-auth semantics. However as
    # JIRA allows some unauthorised operations it doesn't necessarily
    # send the challenge, so the request occurs as the anonymous user,
    # resulting in unexpected results. To work around this we manually
    # inject the basic-auth header up-front to ensure that JIRA treats
    # the requests as authorized for this user.
    # NOTE(review): base64.encodestring is deprecated and removed in
    # Python 3.9 (use encodebytes/b64encode) — presumably py2-era code;
    # confirm target interpreter before modernizing.
    auth = base64.encodestring('%s:%s' % (user, passwd)).replace('\n', '')
    response, info = fetch_url(module, url, data=data, method=method, timeout=timeout,
                               headers={'Content-Type': 'application/json',
                                        'Authorization': "Basic %s" % auth})
    if info['status'] not in (200, 201, 204):
        module.fail_json(msg=info['msg'])
    body = response.read()
    if body:
        return json.loads(body)
    else:
        return {}
def download(module, deb):
    """Download the .deb at URL ``deb`` next to this module and return the local path.

    Fails the module on any download exception.
    """
    tempdir = os.path.dirname(__file__)
    package = os.path.join(tempdir, str(deb.rsplit('/', 1)[1]))
    # When downloading a deb, how much of the deb to download before
    # saving to a tempfile (64k)
    BUFSIZE = 65536
    try:
        rsp, info = fetch_url(module, deb)
        # FIX: open in binary mode — rsp.read() yields bytes on Python 3 and
        # writing bytes to a text-mode ('w') file raises TypeError. Also use
        # a context manager so the handle is closed on error paths, and test
        # for EOF with truthiness so it works for both b'' and ''.
        with open(package, 'wb') as f:
            # Read a chunk at a time to save on ram
            while True:
                data = rsp.read(BUFSIZE)
                if not data:
                    break  # End of file, break while loop
                f.write(data)
        deb = package
    except Exception:
        e = get_exception()
        module.fail_json(msg="Failure downloading %s, %s" % (deb, e))
    return deb
def get_filtered(self):
    """Fetch NITRO config objects matching the configured single-key filter."""
    params = self._module.params
    if params['resource'] is None:
        self.fail_module(msg='NITRO resource is undefined.')
    if params['filter'] is None:
        self.fail_module(msg='NITRO filter is undefined.')
    # Only the first key of the filter dict participates in the query
    keys = list(params['filter'].keys())
    filter_key = keys[0]
    filter_str = '%s:%s' % (filter_key, params['filter'][filter_key])
    url = '%s://%s/nitro/v1/config/%s?filter=%s' % (
        params['nitro_protocol'],
        params['nsip'],
        params['resource'],
        filter_str,
    )
    response, info = fetch_url(
        self._module,
        url=url,
        headers=self._headers,
        method='GET',
    )
    result = {}
    self.edit_response_data(response, info, result, success_status=200)
    self.handle_get_return_object(result)
    # A read never changes device state
    self._module_result['changed'] = False
    return result
def do_request(self, module, url, payload=None, headers=None, method=None):
    """Perform an HTTP request via fetch_url and return the response object.

    Fails the module on any non-200 status.
    """
    res, info = fetch_url(module, url, data=payload, headers=headers, method=method)
    if info['status'] != 200:
        # FIX: fail via the ``module`` argument actually used for the request
        # (previously self.module, which may be a different object than the
        # one passed in by the caller).
        module.fail_json(changed=False, msg=info['msg'])
    return res
def run(self):
    """Scrape an nginx stub_status page and return parsed counters as facts."""
    result = {
        'nginx_status_facts': {
            'active_connections': None,
            'accepts': None,
            'handled': None,
            'requests': None,
            'reading': None,
            'writing': None,
            'waiting': None,
            'data': None,
        }
    }
    # ``module`` is a module-level global here, not an attribute of self
    (response, info) = fetch_url(module=module, url=self.url, force=True, timeout=self.timeout)
    if not response:
        module.fail_json(msg="No valid or no response from url %s within %s seconds (timeout)" % (self.url, self.timeout))
    data = response.read()
    if not data:
        return result
    result['nginx_status_facts']['data'] = data
    # NOTE(review): on Python 3 ``data`` is bytes while the pattern is str,
    # which would make re.match raise — presumably py2-era code; confirm.
    match = re.match(r'Active connections: ([0-9]+) \nserver accepts handled requests\n ([0-9]+) ([0-9]+) ([0-9]+) \nReading: ([0-9]+) Writing: ([0-9]+) Waiting: ([0-9]+)', data, re.S)
    if match:
        result['nginx_status_facts']['active_connections'] = int(match.group(1))
        result['nginx_status_facts']['accepts'] = int(match.group(2))
        result['nginx_status_facts']['handled'] = int(match.group(3))
        result['nginx_status_facts']['requests'] = int(match.group(4))
        result['nginx_status_facts']['reading'] = int(match.group(5))
        result['nginx_status_facts']['writing'] = int(match.group(6))
        result['nginx_status_facts']['waiting'] = int(match.group(7))
    return result
def grafana_delete_dashboard(module, data):
    """Delete the Grafana dashboard identified by data['slug'] if it exists.

    Returns a result dict with msg/changed/slug keys; no-op when the
    dashboard does not exist.
    """
    # define http headers
    headers = {'content-type': 'application/json'}
    if 'grafana_api_key' in data and data['grafana_api_key']:
        headers['Authorization'] = "Bearer %s" % data['grafana_api_key']
    else:
        # FIX: b64encode returns bytes; calling .replace('\n', '') with str
        # arguments on it raises TypeError on Python 3. Use bytes arguments
        # (on Python 2, bytes literals are str so this is compatible).
        auth = base64.b64encode(to_bytes('%s:%s' % (data['grafana_user'], data['grafana_password'])).replace(b'\n', b''))
        # NOTE(review): interpolating bytes here yields "b'...'" on Python 3;
        # consider decoding ``auth`` to text — confirm against callers.
        headers['Authorization'] = 'Basic %s' % auth
        grafana_switch_organisation(module, data['grafana_url'], data['org_id'], headers)
    # test if dashboard already exists
    dashboard_exists, dashboard = grafana_dashboard_exists(module, data['grafana_url'], data['slug'], headers=headers)
    result = {}
    if dashboard_exists is True:
        # delete
        r, info = fetch_url(module, '%s/api/dashboards/db/%s' % (data['grafana_url'], data['slug']), headers=headers, method='DELETE')
        if info['status'] == 200:
            result['msg'] = "Dashboard %s deleted" % data['slug']
            result['changed'] = True
            result['slug'] = data['slug']
        else:
            # FIX: error message said "update" in this delete path
            raise GrafanaAPIException('Unable to delete the dashboard %s : %s' % (data['slug'], info))
    else:
        # dashboard does not exist, do nothing
        result = {'msg': "Dashboard %s does not exist." % data['slug'],
                  'changed': False,
                  'slug': data['slug']}
    return result
def get_existing(self):
    """
    This method is used to get the existing object(s) based on the path
    specified in the module. Each module should build the URL so that if the
    object's name is supplied, then it will retrieve the configuration for
    that particular object, but if no name is supplied, then it will
    retrieve all MOs for the class. Following this method will ensure that
    this method can be used to supply the existing configuration when using
    the get_diff method. The response, status, and existing configuration
    will be added to the self.result dictionary.
    """
    uri = self.result['url'] + self.result['filter_string']
    resp, info = fetch_url(self.module, uri, headers=self.headers, method='GET', timeout=self.params['timeout'], use_proxy=self.params['use_proxy'])
    self.result['response'] = info['msg']
    self.result['status'] = info['status']
    self.result['method'] = 'GET'
    # Handle APIC response
    if info['status'] == 200:
        # APIC wraps the managed objects in an 'imdata' list
        self.result['existing'] = json.loads(resp.read())['imdata']
    else:
        try:
            # APIC error: the error body is only present for HTTP-level
            # failures that still reached the APIC
            aci_response_json(self.result, info['body'])
            self.module.fail_json(msg='Request failed: %(error_code)s %(error_text)s' % self.result, **self.result)
        except KeyError:
            # Connection error (no 'body' key in info)
            self.module.fail_json(msg='Request failed for %(url)s. %(msg)s' % info)
def post_config(self):
    """
    This method is used to handle the logic when the modules state is equal
    to present. The method only pushes a change if the object has
    differences than what exists on the APIC, and if check_mode is False.
    A successful change will mark the module as changed.
    """
    if not self.result['config']:
        # Nothing to push — object already matches the APIC
        return
    elif not self.module.check_mode:
        resp, info = fetch_url(self.module, self.result['url'], data=json.dumps(self.result['config']), headers=self.headers, method='POST', timeout=self.params['timeout'], use_proxy=self.params['use_proxy'])
        self.result['response'] = info['msg']
        self.result['status'] = info['status']
        self.result['method'] = 'POST'
        # Handle APIC response
        if info['status'] == 200:
            self.result['changed'] = True
            aci_response_json(self.result, resp.read())
        else:
            try:
                # APIC error
                aci_response_json(self.result, info['body'])
                self.module.fail_json(msg='Request failed: %(error_code)s %(error_text)s' % self.result, **self.result)
            except KeyError:
                # Connection error (no 'body' key in info)
                self.module.fail_json(msg='Request failed for %(url)s. %(msg)s' % info)
    else:
        # check_mode: report the change without contacting the APIC
        self.result['changed'] = True
        self.result['method'] = 'POST'
def request(self, path, payload=None): ''' Perform a REST request ''' # Ensure method is set (only do this once) self.define_method() # Perform request self.result['url'] = '%(protocol)s://%(hostname)s/' % self.params + path.lstrip('/') resp, info = fetch_url(self.module, self.result['url'], data=payload, headers=self.headers, method=self.params['method'].upper(), timeout=self.params['timeout'], use_proxy=self.params['use_proxy']) self.result['response'] = info['msg'] self.result['status'] = info['status'] # Handle APIC response if info['status'] != 200: try: # APIC error aci_response_json(self.result, info['body']) self.module.fail_json(msg='Request failed: %(error_code)s %(error_text)s' % self.result, **self.result) except KeyError: # Connection error self.module.fail_json(msg='Request failed for %(url)s. %(msg)s' % info) aci_response_json(self.result, resp.read())
def save_config(self):
    """Persist the running NetScaler configuration via the nsconfig save action."""
    params = self._module.params
    url = '%s://%s/nitro/v1/config/nsconfig?action=save' % (
        params['nitro_protocol'],
        params['nsip'],
    )
    # The save action takes an empty nsconfig object as its payload
    payload = self._module.jsonify({'nsconfig': {}})
    response, info = fetch_url(
        self._module,
        url=url,
        headers=self._headers,
        data=payload,
        method='POST',
    )
    result = {}
    self.edit_response_data(response, info, result, success_status=200)
    # Saving the config is not reported as a configuration change
    self._module_result['changed'] = False
    return result
def count(self):
    """Return NITRO error info plus the object count for the configured resource."""
    if self._module.params['resource'] is None:
        self.fail_module(msg='NITRO resource is undefined.')
    url = '%s://%s/nitro/v1/config/%s?count=yes' % (
        self._module.params['nitro_protocol'],
        self._module.params['nsip'],
        self._module.params['resource'],
    )
    r, info = fetch_url(
        self._module,
        url=url,
        headers=self._headers,
        method='GET',
    )
    result = {}
    self.edit_response_data(r, info, result)
    if result['http_response_body'] != '':
        data = self._module.from_json(result['http_response_body'])
        result['nitro_errorcode'] = data['errorcode']
        result['nitro_message'] = data['message']
        result['nitro_severity'] = data['severity']
        # The count is nested under the resource key as __count
        if self._module.params['resource'] in data:
            result['nitro_count'] = data[self._module.params['resource']][0]['__count']
    # A read never changes device state
    self._module_result['changed'] = False
    return result
def mas_login(self):
    """Log in to NetScaler MAS and store the session token in the result.

    On success (nitro_errorcode 0) sets result['nitro_auth_token'] from the
    login response body.
    """
    url = '%s://%s/nitro/v1/config/login' % (
        self._module.params['nitro_protocol'],
        self._module.params['nsip'],
    )
    login_credentials = {
        'login': {
            'username': self._module.params['nitro_user'],
            'password': self._module.params['nitro_pass'],
        }
    }
    # MAS expects the JSON payload wrapped in an object= form field
    data = 'object=\n%s' % self._module.jsonify(login_credentials)
    r, info = fetch_url(
        self._module,
        url=url,
        headers=self._headers,
        data=data,
        method='POST',
    )
    # FIX: removed leftover debug ``print(r, info)`` — Ansible modules must
    # not write to stdout, as that corrupts the module's JSON response.
    result = {}
    self.edit_response_data(r, info, result, success_status=200)
    if result['nitro_errorcode'] == 0:
        body_data = self._module.from_json(result['http_response_body'])
        result['nitro_auth_token'] = body_data['login'][0]['sessionid']
    # Logging in is not a configuration change
    self._module_result['changed'] = False
    return result
def delete(self):
    """Delete the NITRO resource identified by resource type and name.

    Marks the module result as changed when NITRO reports errorcode 0.
    """
    if self._module.params['resource'] is None:
        self.fail_module(msg='NITRO resource is undefined.')
    if self._module.params['name'] is None:
        # FIX: message previously duplicated the resource check's text;
        # it must name the missing parameter (matches the get() method).
        self.fail_module(msg='NITRO resource name is undefined.')
    # Deletion by name takes precedence over deletion by attributes
    url = '%s://%s/nitro/v1/config/%s/%s' % (
        self._module.params['nitro_protocol'],
        self._module.params['nsip'],
        self._module.params['resource'],
        self._module.params['name'],
    )
    r, info = fetch_url(
        self._module,
        url=url,
        headers=self._headers,
        method='DELETE',
    )
    result = {}
    self.edit_response_data(r, info, result, success_status=200)
    if result['nitro_errorcode'] == 0:
        self._module_result['changed'] = True
    else:
        self._module_result['changed'] = False
    return result
def delete_by_args(self):
    """Delete a NITRO resource selected by query args rather than by name."""
    params = self._module.params
    if params['resource'] is None:
        self.fail_module(msg='NITRO resource is undefined.')
    if params['args'] is None:
        self.fail_module(msg='NITRO args is undefined.')
    base_url = '%s://%s/nitro/v1/config/%s' % (
        params['nitro_protocol'],
        params['nsip'],
        params['resource'],
    )
    # Encode the selector as a query string: args=key1:val1,key2:val2
    args_dict = params['args']
    query = 'args=' + ','.join('%s:%s' % (key, args_dict[key]) for key in args_dict)
    url = '?'.join([base_url, query])
    response, info = fetch_url(
        self._module,
        url=url,
        headers=self._headers,
        method='DELETE',
    )
    result = {}
    self.edit_response_data(response, info, result, success_status=200)
    self._module_result['changed'] = (result['nitro_errorcode'] == 0)
    return result
def login(self): ''' Log in to APIC ''' # Ensure protocol is set (only do this once) self.define_protocol() # Perform login request url = '%(protocol)s://%(hostname)s/api/aaaLogin.json' % self.params payload = {'aaaUser': {'attributes': {'name': self.params['username'], 'pwd': self.params['password']}}} resp, auth = fetch_url(self.module, url, data=json.dumps(payload), method='POST', timeout=self.params['timeout'], use_proxy=self.params['use_proxy']) # Handle APIC response if auth['status'] != 200: self.result['response'] = auth['msg'] self.result['status'] = auth['status'] try: # APIC error aci_response_json(self.result, auth['body']) self.module.fail_json(msg='Authentication failed: %(error_code)s %(error_text)s' % self.result, **self.result) except KeyError: # Connection error self.module.fail_json(msg='Authentication failed for %(url)s. %(msg)s' % auth) # Retain cookie for later use self.headers = dict(Cookie=resp.headers['Set-Cookie'])
def test_fetch_url_nossl(open_url_mock, fake_ansible_module, mocker):
    """Verify fetch_url's NoSSLError message mentions python-ssl only on Red Hat distributions."""
    # Non-RedHat: generic failure message without the python-ssl hint
    mocker.patch('ansible.module_utils.urls.get_distribution', return_value='notredhat')
    open_url_mock.side_effect = NoSSLError
    with pytest.raises(FailJson) as excinfo:
        fetch_url(fake_ansible_module, 'http://ansible.com/')
    assert 'python-ssl' not in excinfo.value.kwargs['msg']
    # RedHat: message suggests installing the python-ssl package
    mocker.patch('ansible.module_utils.urls.get_distribution', return_value='redhat')
    open_url_mock.side_effect = NoSSLError
    with pytest.raises(FailJson) as excinfo:
        fetch_url(fake_ansible_module, 'http://ansible.com/')
    assert 'python-ssl' in excinfo.value.kwargs['msg']
def query(self, path):
    ''' Perform a query with no payload '''
    url = '%(protocol)s://%(hostname)s/' % self.params + path.lstrip('/')
    resp, query = fetch_url(self.module, url, data=None, headers=self.headers, method='GET', timeout=self.params['timeout'], use_proxy=self.params['use_proxy'])
    # Handle APIC response
    if query['status'] != 200:
        self.result['response'] = query['msg']
        self.result['status'] = query['status']
        try:
            # APIC error
            aci_response_json(self.result, query['body'])
            self.module.fail_json(msg='Query failed: %(error_code)s %(error_text)s' % self.result, **self.result)
        except KeyError:
            # Connection error (no 'body' key in query info)
            self.module.fail_json(msg='Query failed for %(url)s. %(msg)s' % query)
    # Return the imdata list pretty-printed as JSON text
    query = json.loads(resp.read())
    return json.dumps(query['imdata'], sort_keys=True, indent=2) + '\n'
def uri(module, url, dest, body, body_format, method, headers, socket_timeout):
    """Perform the HTTP request for the uri module (src-upload capable variant).

    Returns a (result-dict, content, dest) tuple. Supports streaming a local
    ``src`` file as the request body and conditional downloads into a
    directory ``dest``.
    """
    # if dest is set and is a directory, let's check if we get redirected and
    # set the filename from that url
    redirected = False
    redir_info = {}
    r = {}
    src = module.params['src']
    if src:
        try:
            headers.update({'Content-Length': os.stat(src).st_size})
            # File handle is passed through so the body is streamed
            data = open(src, 'rb')
        except OSError:
            module.fail_json(msg='Unable to open source file %s' % src, elapsed=0)
    else:
        data = body
    kwargs = {}
    if dest is not None:
        # Stash follow_redirects, in this block we don't want to follow
        # we'll reset back to the supplied value soon
        follow_redirects = module.params['follow_redirects']
        module.params['follow_redirects'] = False
        if os.path.isdir(dest):
            # first check if we are redirected to a file download
            _, redir_info = fetch_url(module, url, data=body, headers=headers,
                                      method=method,
                                      timeout=socket_timeout, unix_socket=module.params['unix_socket'])
            # if we are redirected, update the url with the location header,
            # and update dest with the new url filename
            if redir_info['status'] in (301, 302, 303, 307):
                url = redir_info['location']
                redirected = True
            dest = os.path.join(dest, url_filename(url))
        # if destination file already exist, only download if file newer
        if os.path.exists(dest):
            kwargs['last_mod_time'] = datetime.datetime.utcfromtimestamp(os.path.getmtime(dest))
        # Reset follow_redirects back to the stashed value
        module.params['follow_redirects'] = follow_redirects
    resp, info = fetch_url(module, url, data=data, headers=headers,
                           method=method, timeout=socket_timeout, unix_socket=module.params['unix_socket'],
                           **kwargs)
    try:
        content = resp.read()
    except AttributeError:
        # there was no content, but the error read()
        # may have been stored in the info as 'body'
        content = info.pop('body', '')
    if src:
        # Try to close the open file handle
        try:
            data.close()
        except Exception:
            pass
    r['redirected'] = redirected or info['url'] != url
    r.update(redir_info)
    r.update(info)
    return r, content, dest
def main():
    """Entry point: report a BigPanda deployment start/end event."""
    module = AnsibleModule(
        argument_spec=dict(
            component=dict(required=True, aliases=['name']),
            version=dict(required=True),
            token=dict(required=True, no_log=True),
            state=dict(required=True, choices=['started', 'finished', 'failed']),
            hosts=dict(required=False, default=[socket.gethostname()], aliases=['host']),
            env=dict(required=False),
            owner=dict(required=False),
            description=dict(required=False),
            message=dict(required=False),
            source_system=dict(required=False, default='ansible'),
            validate_certs=dict(default='yes', type='bool'),
            url=dict(required=False, default='https://api.bigpanda.io'),
        ),
        supports_check_mode=True,
    )
    token = module.params['token']
    state = module.params['state']
    url = module.params['url']
    # Build the common request body
    body = dict()
    for k in ('component', 'version', 'hosts'):
        v = module.params[k]
        if v is not None:
            body[k] = v
    # The API expects a list of hosts even for a single host
    if not isinstance(body['hosts'], list):
        body['hosts'] = [body['hosts']]
    # Insert state-specific attributes to body
    if state == 'started':
        for k in ('source_system', 'env', 'owner', 'description'):
            v = module.params[k]
            if v is not None:
                body[k] = v
        request_url = url + '/data/events/deployments/start'
    else:
        message = module.params['message']
        if message is not None:
            body['errorMessage'] = message
        if state == 'finished':
            body['status'] = 'success'
        else:
            body['status'] = 'failure'
        request_url = url + '/data/events/deployments/end'
    # Build the deployment object we return
    deployment = dict(token=token, url=url)
    deployment.update(body)
    # Expose the API's errorMessage under the module's 'message' key
    if 'errorMessage' in deployment:
        message = deployment.pop('errorMessage')
        deployment['message'] = message
    # If we're in check mode, just exit pretending like we succeeded
    if module.check_mode:
        module.exit_json(changed=True, **deployment)
    # Send the data to bigpanda
    data = json.dumps(body)
    headers = {'Authorization': 'Bearer %s' % token, 'Content-Type': 'application/json'}
    try:
        response, info = fetch_url(module, request_url, data=data, headers=headers)
        if info['status'] == 200:
            module.exit_json(changed=True, **deployment)
        else:
            module.fail_json(msg=json.dumps(info))
    except Exception as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())
def api_query(self, path="/", method="GET", data=None):
    """Call the Vultr API with retries/backoff and return the parsed JSON (or {}).

    ``data`` dict values that are lists become repeated ``key[]=`` params;
    the try/except AttributeError pairs select between the Python 2
    (urllib.quote/urlencode) and Python 3 (urllib.parse.*) spellings.
    """
    url = self.api_config['api_endpoint'] + path
    if data:
        data_encoded = dict()
        data_list = ""
        for k, v in data.items():
            if isinstance(v, list):
                for s in v:
                    try:
                        data_list += '&%s[]=%s' % (k, urllib.quote(s))
                    except AttributeError:
                        data_list += '&%s[]=%s' % (k, urllib.parse.quote(s))
            elif v is not None:
                data_encoded[k] = v
        try:
            data = urllib.urlencode(data_encoded) + data_list
        except AttributeError:
            data = urllib.parse.urlencode(data_encoded) + data_list
    retry_max_delay = self.api_config['api_retry_max_delay']
    randomness = random.randint(0, 1000) / 1000.0
    for retry in range(0, self.api_config['api_retries']):
        response, info = fetch_url(
            module=self.module,
            url=url,
            data=data,
            method=method,
            headers=self.headers,
            timeout=self.api_config['api_timeout'],
        )
        if info.get('status') == 200:
            break
        # Vultr has a rate limiting requests per second, try to be polite
        # Use exponential backoff plus a little bit of randomness
        delay = 2 ** retry + randomness
        if delay > retry_max_delay:
            delay = retry_max_delay + randomness
        time.sleep(delay)
    else:
        # for/else: loop exhausted all retries without a 200
        self.fail_json(
            msg="Reached API retries limit %s for URL %s, method %s with data %s. Returned %s, with body: %s %s" %
            (self.api_config['api_retries'], url, method, data, info['status'], info['msg'], info.get('body')))
    if info.get('status') != 200:
        self.fail_json(
            msg="URL %s, method %s with data %s. Returned %s, with body: %s %s" %
            (url, method, data, info['status'], info['msg'], info.get('body')))
    res = response.read()
    if not res:
        return {}
    try:
        return self.module.from_json(to_native(res)) or {}
    except ValueError as e:
        self.module.fail_json(msg="Could not process response into json: %s" % e)
def _cf_simple_api_call(self, api_call, method='GET', payload=None):
    """Perform a single Cloudflare API call and return (result, status).

    :param api_call: path appended to the configured API endpoint.
    :param method: HTTP verb.
    :param payload: optional object JSON-encoded into the request body.
    :return: tuple of parsed JSON result dict and the HTTP status code.
        Calls ``fail_json`` on unexpected status codes or API errors.
    """
    headers = {
        'X-Auth-Email': self.account_email,
        'X-Auth-Key': self.account_api_token,
        'Content-Type': 'application/json'
    }
    data = None
    if payload:
        try:
            data = json.dumps(payload)
        except Exception as e:
            self.module.fail_json(
                msg="Failed to encode payload as JSON: %s " % to_native(e))

    resp, info = fetch_url(self.module,
                           self.cf_api_endpoint + api_call,
                           headers=headers,
                           data=data,
                           method=method,
                           timeout=self.timeout)

    # Any status outside this whitelist is a hard failure; the listed
    # error codes fall through so the API's own error body can be parsed.
    if info['status'] not in [200, 304, 400, 401, 403, 429, 405, 415]:
        self.module.fail_json(
            msg="Failed API call {0}; got unexpected HTTP code {1}".format(
                api_call, info['status']))

    error_msg = ''
    if info['status'] == 401:
        # Unauthorized
        error_msg = "API user does not have permission; Status: {0}; Method: {1}: Call: {2}".format(
            info['status'], method, api_call)
    elif info['status'] == 403:
        # Forbidden
        error_msg = "API request not authenticated; Status: {0}; Method: {1}: Call: {2}".format(
            info['status'], method, api_call)
    elif info['status'] == 429:
        # Too many requests
        error_msg = "API client is rate limited; Status: {0}; Method: {1}: Call: {2}".format(
            info['status'], method, api_call)
    elif info['status'] == 405:
        # Method not allowed
        error_msg = "API incorrect HTTP method provided; Status: {0}; Method: {1}: Call: {2}".format(
            info['status'], method, api_call)
    elif info['status'] == 415:
        # Unsupported Media Type
        error_msg = "API request is not valid JSON; Status: {0}; Method: {1}: Call: {2}".format(
            info['status'], method, api_call)
    elif info['status'] == 400:
        # Bad Request
        error_msg = "API bad request; Status: {0}; Method: {1}: Call: {2}".format(
            info['status'], method, api_call)

    result = None
    # NOTE(review): if resp.read() raises AttributeError and info['body']
    # is falsy, `content` is never assigned and the `if content:` below
    # raises NameError — confirm whether fetch_url guarantees a body here.
    try:
        content = resp.read()
    except AttributeError:
        if info['body']:
            content = info['body']
        else:
            error_msg += "; The API response was empty"

    if content:
        try:
            result = json.loads(
                to_text(content, errors='surrogate_or_strict'))
        except (getattr(json, 'JSONDecodeError', ValueError)) as e:
            error_msg += "; Failed to parse API response with error {0}: {1}".format(
                to_native(e), content)

    # Without a valid/parsed JSON response no more error processing can be done
    if result is None:
        self.module.fail_json(msg=error_msg)

    if not result['success']:
        error_msg += "; Error details: "
        for error in result['errors']:
            error_msg += "code: {0}, error: {1}; ".format(
                error['code'], error['message'])
            if 'error_chain' in error:
                for chain_error in error['error_chain']:
                    error_msg += "code: {0}, error: {1}; ".format(
                        chain_error['code'], chain_error['message'])
        self.module.fail_json(msg=error_msg)

    return result, info['status']
def copy(self):
    """Copy a local file into the guest OS of the managed VM.

    Uses the vSphere guest-operations FileManager to obtain an upload
    URL, then PUTs the file contents to it. Returns a result dict with
    ``changed`` and the VM uuid; calls ``fail_json`` on any error.
    """
    result = dict(changed=True, uuid=self.vm.summary.config.uuid)
    vm_username = self.module.params['vm_username']
    vm_password = self.module.params['vm_password']
    hostname = self.module.params['hostname']
    overwrite = self.module.params["copy"]["overwrite"]
    dest = self.module.params["copy"]['dest']
    src = self.module.params['copy']['src']

    # Validate the local source before contacting vSphere.
    b_src = to_bytes(src, errors='surrogate_or_strict')
    if not os.path.exists(b_src):
        self.module.fail_json(msg="Source %s not found" % src)
    if not os.access(b_src, os.R_OK):
        self.module.fail_json(msg="Source %s not readable" % src)
    if os.path.isdir(b_src):
        self.module.fail_json(
            msg="copy does not support copy of directory: %s" % src)

    data = None
    with open(b_src, "rb") as local_file:
        data = local_file.read()
    file_size = os.path.getsize(b_src)

    creds = vim.vm.guest.NamePasswordAuthentication(username=vm_username,
                                                    password=vm_password)
    file_attributes = vim.vm.guest.FileManager.FileAttributes()
    file_manager = self.content.guestOperationsManager.fileManager
    try:
        url = file_manager.InitiateFileTransferToGuest(
            vm=self.vm,
            auth=creds,
            guestFilePath=dest,
            fileAttributes=file_attributes,
            overwrite=overwrite,
            fileSize=file_size)
        # The returned URL contains a '*' placeholder for the ESXi host.
        url = url.replace("*", hostname)
        resp, info = urls.fetch_url(self.module,
                                    url,
                                    data=data,
                                    method="PUT")
        status_code = info["status"]
        if status_code != 200:
            self.module.fail_json(
                msg='problem during file transfer, http message:%s' % info,
                uuid=self.vm.summary.config.uuid)
    except vim.fault.FileAlreadyExists:
        # Not an error: report unchanged when overwrite is disallowed.
        result['changed'] = False
        result['msg'] = "Guest file %s already exists" % dest
        return result
    except vim.fault.FileFault as e:
        self.module.fail_json(msg="FileFault:%s" % to_native(e.msg),
                              uuid=self.vm.summary.config.uuid)
    except vim.fault.GuestPermissionDenied as permission_denied:
        self.module.fail_json(msg="Permission denied to copy file into "
                              "destination %s : %s" %
                              (dest, to_native(permission_denied.msg)),
                              uuid=self.vm.summary.config.uuid)
    except vim.fault.InvalidGuestLogin as invalid_guest_login:
        self.module.fail_json(
            msg="Invalid guest login for user"
            " %s : %s" % (vm_username, to_native(invalid_guest_login.msg)))
    # other exceptions
    except Exception as e:
        self.module.fail_json(
            msg="Failed to Copy file to Vm VMware exception : %s" %
            to_native(e),
            uuid=self.vm.summary.config.uuid)
    return result
def grafana_create_dashboard(module, data):
    """Create or update a Grafana dashboard from a local JSON file.

    Loads the dashboard JSON from ``data['path']``, resolves its
    identifier (slug for Grafana < 5, uid otherwise), checks whether the
    dashboard exists, and POSTs a create/update to the dashboards API.
    Returns a result dict with ``changed``, ``msg`` and ``uid``.
    """
    # define data payload for grafana API
    try:
        with open(data['path'], 'r') as json_file:
            payload = json.load(json_file)
    except Exception as e:
        raise GrafanaAPIException("Can't load json file %s" % to_native(e))

    # Check that the dashboard JSON is nested under the 'dashboard' key
    if 'dashboard' not in payload:
        payload = {'dashboard': payload}

    # define http header
    headers = grafana_headers(module, data)

    grafana_version = get_grafana_version(module, data['grafana_url'],
                                          headers)
    if grafana_version < 5:
        # Grafana < 5 identifies dashboards by slug rather than uid.
        if data.get('slug'):
            uid = data['slug']
        elif 'meta' in payload and 'slug' in payload['meta']:
            uid = payload['meta']['slug']
        else:
            raise GrafanaMalformedJson(
                'No slug found in json. Needed with grafana < 5')
    else:
        if data.get('uid'):
            uid = data['uid']
        elif 'uid' in payload['dashboard']:
            uid = payload['dashboard']['uid']
        else:
            uid = None

    result = {}

    # test if the folder exists
    # NOTE(review): folder_exists/folder_id are only bound when
    # grafana_version >= 5; the search/create paths below reference them
    # unconditionally, which would raise NameError on Grafana < 5 —
    # confirm whether those paths are reachable on old versions.
    if grafana_version >= 5:
        folder_exists, folder_id = grafana_folder_exists(
            module, data['grafana_url'], data['folder'], headers)
        if folder_exists is False:
            result['msg'] = "Dashboard folder '%s' does not exist." % data[
                'folder']
            result['uid'] = uid
            result['changed'] = False
            return result

        payload['folderId'] = folder_id

    # test if dashboard already exists
    if uid:
        dashboard_exists, dashboard = grafana_dashboard_exists(
            module, data['grafana_url'], uid, headers=headers)
    else:
        dashboard_exists, dashboard = grafana_dashboard_search(
            module, data['grafana_url'], folder_id,
            payload['dashboard']['title'], headers=headers)

    if dashboard_exists is True:
        if grafana_dashboard_changed(payload, dashboard):
            # update
            if 'overwrite' in data and data['overwrite']:
                payload['overwrite'] = True
            if 'message' in data and data['message']:
                payload['message'] = data['message']

            r, info = fetch_url(module,
                                '%s/api/dashboards/db' % data['grafana_url'],
                                data=json.dumps(payload),
                                headers=headers,
                                method='POST')
            if info['status'] == 200:
                if grafana_version >= 5:
                    # The API response carries the (possibly new) uid.
                    try:
                        dashboard = json.loads(r.read())
                        uid = dashboard['uid']
                    except Exception as e:
                        raise GrafanaAPIException(e)
                result['uid'] = uid
                result['msg'] = "Dashboard %s updated" % payload['dashboard'][
                    'title']
                result['changed'] = True
            else:
                body = json.loads(info['body'])
                raise GrafanaAPIException(
                    'Unable to update the dashboard %s : %s (HTTP: %d)' %
                    (uid, body['message'], info['status']))
        else:
            # unchanged
            result['uid'] = uid
            result['msg'] = "Dashboard %s unchanged." % payload['dashboard'][
                'title']
            result['changed'] = False
    else:
        # create
        if folder_exists is True:
            payload['folderId'] = folder_id
        r, info = fetch_url(module,
                            '%s/api/dashboards/db' % data['grafana_url'],
                            data=json.dumps(payload),
                            headers=headers,
                            method='POST')
        if info['status'] == 200:
            result[
                'msg'] = "Dashboard %s created" % payload['dashboard']['title']
            result['changed'] = True
            if grafana_version >= 5:
                try:
                    dashboard = json.loads(r.read())
                    uid = dashboard['uid']
                except Exception as e:
                    raise GrafanaAPIException(e)
            result['uid'] = uid
        else:
            raise GrafanaAPIException(
                'Unable to create the new dashboard %s : %s - %s.' %
                (payload['dashboard']['title'], info['status'], info))

    return result
def main():
    """Ansible module entry point: raw REST access to a Cisco ACI APIC.

    Determines the payload type (JSON or XML) from the request path,
    validates/normalizes the payload (inline content or a src file),
    signs the request when a private key is configured, performs it via
    ``fetch_url`` and reports the parsed response.
    """
    argument_spec = aci_argument_spec()
    argument_spec.update(
        path=dict(type='str', required=True, aliases=['uri']),
        method=dict(type='str',
                    default='get',
                    choices=['delete', 'get', 'post'],
                    aliases=['action']),
        src=dict(type='path', aliases=['config_file']),
        content=dict(type='raw'),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        mutually_exclusive=[['content', 'src']],
    )

    content = module.params['content']
    path = module.params['path']
    src = module.params['src']

    # Report missing file
    file_exists = False
    if src:
        if os.path.isfile(src):
            file_exists = True
        else:
            module.fail_json(msg="Cannot find/access src '%s'" % src)

    # Find request type
    if path.find('.xml') != -1:
        rest_type = 'xml'
        if not HAS_LXML_ETREE:
            module.fail_json(
                msg=
                'The lxml python library is missing, or lacks etree support.')
        if not HAS_XMLJSON_COBRA:
            module.fail_json(
                msg=
                'The xmljson python library is missing, or lacks cobra support.'
            )
    elif path.find('.json') != -1:
        rest_type = 'json'
    else:
        module.fail_json(
            msg='Failed to find REST API payload type (neither .xml nor .json).'
        )

    aci = ACIRESTModule(module)
    aci.result['status'] = -1  # Ensure we always return a status

    # We include the payload as it may be templated
    payload = content
    if file_exists:
        with open(src, 'r') as config_object:
            # TODO: Would be nice to template this, requires action-plugin
            payload = config_object.read()

    # Validate payload
    if rest_type == 'json':
        if content and isinstance(content, dict):
            # Validate inline YAML/JSON
            payload = json.dumps(payload)
        elif payload and isinstance(payload, str) and HAS_YAML:
            try:
                # Validate YAML/JSON string
                payload = json.dumps(yaml.safe_load(payload))
            except Exception as e:
                module.fail_json(
                    msg='Failed to parse provided JSON/YAML payload: %s' %
                    to_text(e),
                    exception=to_text(e),
                    payload=payload)
    elif rest_type == 'xml' and HAS_LXML_ETREE:
        if content and isinstance(content, dict) and HAS_XMLJSON_COBRA:
            # Validate inline YAML/JSON
            # FIXME: Converting from a dictionary to XML is unsupported at this time
            # payload = etree.tostring(payload)
            pass
        elif payload and isinstance(payload, str):
            try:
                # Validate XML string
                payload = lxml.etree.tostring(lxml.etree.fromstring(payload))
            except Exception as e:
                module.fail_json(
                    msg='Failed to parse provided XML payload: %s' %
                    to_text(e),
                    payload=payload)

    # Perform actual request using auth cookie (Same as aci.request(), but also supports XML)
    if 'port' in aci.params and aci.params['port'] is not None:
        aci.url = '%(protocol)s://%(host)s:%(port)s/' % aci.params + path.lstrip(
            '/')
    else:
        aci.url = '%(protocol)s://%(host)s/' % aci.params + path.lstrip('/')
    if aci.params['method'] != 'get':
        # Non-GET requests ask the APIC to return only modified objects;
        # both the signing path and the URL carry the query parameter.
        path += '?rsp-subtree=modified'
        aci.url = update_qsl(aci.url, {'rsp-subtree': 'modified'})

    # Sign and encode request as to APIC's wishes
    if aci.params['private_key'] is not None:
        aci.cert_auth(path=path, payload=payload)

    aci.method = aci.params['method'].upper()

    # Perform request
    resp, info = fetch_url(module,
                           aci.url,
                           data=payload,
                           headers=aci.headers,
                           method=aci.method,
                           timeout=aci.params['timeout'],
                           use_proxy=aci.params['use_proxy'])

    aci.response = info['msg']
    aci.status = info['status']

    # Report failure
    if info['status'] != 200:
        try:
            # APIC error
            aci.response_type(info['body'], rest_type)
            aci.fail_json(msg='APIC Error %(code)s: %(text)s' % aci.error)
        except KeyError:
            # Connection error
            aci.fail_json(msg='Connection failed for %(url)s. %(msg)s' % info)

    aci.response_type(resp.read(), rest_type)

    aci.result['imdata'] = aci.imdata
    aci.result['totalCount'] = aci.totalCount

    # Report success
    aci.exit_json(**aci.result)
def run_module():
    """Manage Veeam Backup & Replication credential records via the REST API.

    Authenticates with the OAuth2 password grant, then either creates a
    credential (``state=present``) or deletes one by id (``state=absent``).
    Exits via ``exit_json``/``fail_json``.
    """
    # define available arguments/parameters a user can pass to the module
    module_args = dict(server_name=dict(type='str', required=True),
                       server_username=dict(type='str', required=True),
                       server_password=dict(type='str', required=True,
                                            no_log=True),
                       server_port=dict(type='str', default='9419'),
                       state=dict(type="str", choices=("absent", "present"),
                                  default="present"),
                       id=dict(type='str', required=False),
                       username=dict(type='str', required=False),
                       password=dict(type='str', required=False, no_log=True),
                       type=dict(type='str',
                                 choices=("Windows", "Linux", "Standard"),
                                 default='Standard'),
                       description=dict(type='str', required=False),
                       validate_certs=dict(type='bool', default='false'))

    required_if_args = [["state", "present", ["username", "password"]],
                        ["state", "absent", ["id"]]]
    required_together_args = [["password", "username"]]

    # seed the result dict in the object
    # we primarily care about changed and state
    result = dict(changed=False)

    module = AnsibleModule(argument_spec=module_args,
                           required_if=required_if_args,
                           required_together=required_together_args,
                           supports_check_mode=False)

    # General
    apiversion = '1.0-rev1'
    state = module.params['state']
    request_server = module.params['server_name']
    request_port = module.params['server_port']

    # Authenticate
    request_username = module.params['server_username']
    request_password = module.params['server_password']
    # FIX: this statement had been redacted to '******' placeholders,
    # leaving invalid syntax; reconstructed as the standard OAuth2
    # password-grant form body.
    # TODO(review): consider urlencode() so special characters in the
    # credentials are escaped correctly.
    payload = ('grant_type=password&username=' + request_username +
               '&password=' + request_password)
    headers = {
        'accept': 'application/json',
        'x-api-version': apiversion,
        'Content-Type': 'application/x-www-form-urlencoded',
        'Authorization': 'true'
    }
    request_url = ('https://' + request_server + ':' + request_port +
                   '/api/oauth2/token')
    method = "Post"
    req, info = fetch_url(module, request_url, headers=headers,
                          method=method, data=payload)
    if info['status'] != 200:
        module.fail_json(msg="Fail: %s" %
                         ("Status: " + str(info['status']) + ", Message: " +
                          str(info['msg'])))
    try:
        resp = json.loads(req.read())
    except AttributeError:
        # fetch_url returns None for the response object on failure.
        module.fail_json(msg='Parsing Response Failed', **result)

    # Payload
    if state == 'present':
        username = module.params['username']
        password = module.params['password']
        credtype = module.params['type']
        description = module.params['description']
        body = {
            'type': credtype,
            'username': username,
            'password': password,
            'description': description
        }
        bodyjson = json.dumps(body)
        headers = {
            'x-api-version': apiversion,
            'Authorization': 'Bearer ' + resp['access_token'],
            'Content-Type': 'application/json'
        }
        request_url = ('https://' + request_server + ':' + request_port +
                       '/api/v1/credentials')
        method = "Post"
        req, info = fetch_url(module, request_url, headers=headers,
                              method=method, data=bodyjson)
        if info['status'] != 200:
            module.fail_json(msg="Fail: %s" %
                             ("Status: " + str(info['status']) +
                              ", Message: " + str(info['msg'])))
        try:
            result['msg'] = json.loads(req.read())
            result['changed'] = True
        except AttributeError:
            module.fail_json(msg='Parsing Response Failed', **result)

    if state == 'absent':
        credid = module.params['id']
        headers = {
            'x-api-version': '1.0-rev1',
            'Authorization': 'Bearer ' + resp['access_token']
        }
        request_url = ('https://' + request_server + ':' + request_port +
                       '/api/v1/credentials/' + credid)
        # GET first to verify the credential exists; delete only if found.
        method = "get"
        req, info = fetch_url(module, request_url, headers=headers,
                              method=method)
        if info['status'] == 200:
            method = "Delete"
            req, info = fetch_url(module, request_url, headers=headers,
                                  method=method)
            if info['status'] != 200:
                module.fail_json(msg="Fail: %s" %
                                 ("Status: " + str(info['status']) +
                                  ", Message: " + str(info['msg'])))
            try:
                result['changed'] = True
            except AttributeError:
                module.fail_json(msg='Parsing Response Failed', **result)

    module.exit_json(**result)
def main():
    """Ansible module entry point: start/stop/restart a MapR service.

    Calls the MapR MCS REST API for the current host and exits via
    ``exit_json``/``fail_json`` based on the API's JSON status.
    """
    module = AnsibleModule(argument_spec=dict(
        username=dict(type='str', required=True),
        password=dict(type='str', required=True, no_log=True),
        service_name=dict(type='str', required=True),
        mcs_url=dict(type='str', required=True),
        mcs_port=dict(type='str', default='8443', required=False),
        state=dict(type='str', required=True),
        validate_certs=dict(type='bool', default='False'),
    ))
    maprUsername = module.params['username']
    maprPassword = module.params['password']
    serviceName = module.params['service_name'].lower()
    serviceState = module.params['state'].lower()
    mcsUrl = module.params['mcs_url']
    mcsPort = module.params['mcs_port']
    # Valid values for the 'state' parameter.
    mapr_default_service_state = ['start', 'stop', 'restart']
    # Hack to add basic auth username and password the way fetch_url expects
    module.params['url_username'] = maprUsername
    module.params['url_password'] = maprPassword

    def get_current_hostname():
        # Resolve this node's hostname via the `hostname` binary.
        cmd = module.get_bin_path('hostname', True)
        rc, out, err = module.run_command(cmd)
        if rc != 0:
            module.fail_json(msg="Command failed rc=%d, out=%s, err=%s" %
                             (rc, out, err))
        return out.strip()

    if not maprUsername or not maprPassword:
        module.fail_json(msg="Username and Password should be defined")
    elif not serviceName or not serviceState:
        module.fail_json(msg="Service Name & Service State should be defined")
    elif not mcsUrl:
        module.fail_json(msg="MCS Url Should be Defined")
    elif serviceState not in mapr_default_service_state:
        module.fail_json(msg="state should be start/stop/restart only")
    else:
        host = get_current_hostname()
        url_parameters = "?action=" + serviceState + "&nodes=" + \
            str(host) + "&name=" + serviceName
        complete_url = "https://" + mcsUrl + ":" + mcsPort + \
            "/rest/node/services" + url_parameters
        headers = {'Content-Type': 'application/json'}
        (resp, info) = fetch_url(module,
                                 complete_url,
                                 headers=headers,
                                 method='GET')
        if info['status'] >= 400:
            module.fail_json(msg="Unauthorized Access to MapR Services")
        elif info['status'] == 200:
            # HTTP 200 can still carry an application-level error status.
            body = json.loads(resp.read())
            if body['status'] == 'ERROR':
                module.fail_json(msg=body['errors'][0]['desc'])
            else:
                module.exit_json(changed=True)
        else:
            module.fail_json(msg="Unknown Response from MapR API: %s" %
                             resp.read())
def grafana_create_datasource(module, data):
    """Create or update a Grafana datasource.

    Builds the datasource payload (auth, TLS and type-specific options),
    checks whether a datasource of that name exists, and either PUTs an
    update or POSTs a new datasource. Returns a result dict with
    ``changed``, ``msg``, ``name`` and ``id``.

    :raises GrafanaAPIException: on any non-200 API response.
    """
    # define data payload for grafana API
    payload = {'orgId': data['org_id'],
               'name': data['name'],
               'type': data['ds_type'],
               'access': data['access'],
               'url': data['url'],
               'database': data['database'],
               'withCredentials': data['with_credentials'],
               'isDefault': data['is_default'],
               'user': data['user'],
               'password': data['password']}

    # define basic auth
    if 'basic_auth_user' in data and data['basic_auth_user'] and 'basic_auth_password' in data and data['basic_auth_password']:
        payload['basicAuth'] = True
        payload['basicAuthUser'] = data['basic_auth_user']
        payload['basicAuthPassword'] = data['basic_auth_password']
    else:
        payload['basicAuth'] = False

    # define tls auth
    json_data = {}
    if data.get('tls_client_cert') and data.get('tls_client_key'):
        json_data['tlsAuth'] = True
        if data.get('tls_ca_cert'):
            payload['secureJsonData'] = {
                'tlsCACert': data['tls_ca_cert'],
                'tlsClientCert': data['tls_client_cert'],
                'tlsClientKey': data['tls_client_key']
            }
            json_data['tlsAuthWithCACert'] = True
        else:
            payload['secureJsonData'] = {
                'tlsClientCert': data['tls_client_cert'],
                'tlsClientKey': data['tls_client_key']
            }
    else:
        json_data['tlsAuth'] = False
        json_data['tlsAuthWithCACert'] = False

    # datasource type related parameters
    if data['ds_type'] == 'elasticsearch':
        json_data['esVersion'] = data['es_version']
        json_data['timeField'] = data['time_field']
        if data.get('interval'):
            json_data['interval'] = data['interval']
        if data['es_version'] >= 56:
            json_data['maxConcurrentShardRequests'] = data['max_concurrent_shard_requests']

    if data['ds_type'] == 'elasticsearch' or data['ds_type'] == 'influxdb':
        if data.get('time_interval'):
            json_data['timeInterval'] = data['time_interval']

    if data['ds_type'] == 'opentsdb':
        json_data['tsdbVersion'] = data['tsdb_version']
        if data['tsdb_resolution'] == 'second':
            json_data['tsdbResolution'] = 1
        else:
            json_data['tsdbResolution'] = 2

    if data['ds_type'] == 'postgres':
        json_data['sslmode'] = data['sslmode']

    payload['jsonData'] = json_data

    # define http header
    headers = {'content-type': 'application/json; charset=utf8'}
    if 'grafana_api_key' in data and data['grafana_api_key'] is not None:
        headers['Authorization'] = "Bearer %s" % data['grafana_api_key']
    else:
        # FIX: base64.encodestring() was Python-2-only (removed in 3.9 and
        # requiring bytes on Python 3). b64encode() is the supported
        # equivalent and inserts no newlines, so no .replace('\n', '').
        auth = base64.b64encode(
            ('%s:%s' % (data['grafana_user'], data['grafana_password'])).encode('utf-8')
        ).decode('ascii')
        headers['Authorization'] = 'Basic %s' % auth
        grafana_switch_organisation(module, data['grafana_url'], data['org_id'], headers)

    # test if datasource already exists
    datasource_exists, ds = grafana_datasource_exists(module, data['grafana_url'], data['name'], headers=headers)

    result = {}
    if datasource_exists is True:
        # Strip server-generated fields so the dict comparison below only
        # looks at user-controlled settings.
        del ds['typeLogoUrl']
        if ds['basicAuth'] is False:
            del ds['basicAuthUser']
            del ds['basicAuthPassword']
        if 'jsonData' in ds:
            if 'tlsAuth' in ds['jsonData'] and ds['jsonData']['tlsAuth'] is False:
                del ds['secureJsonFields']
            if 'tlsAuth' not in ds['jsonData']:
                del ds['secureJsonFields']
        payload['id'] = ds['id']
        if ds == payload:
            # unchanged
            result['name'] = data['name']
            result['id'] = ds['id']
            result['msg'] = "Datasource %s unchanged." % data['name']
            result['changed'] = False
        else:
            # update
            r, info = fetch_url(module, '%s/api/datasources/%d' % (data['grafana_url'], ds['id']),
                                data=json.dumps(payload), headers=headers, method='PUT')
            if info['status'] == 200:
                res = json.loads(r.read())
                result['name'] = data['name']
                result['id'] = ds['id']
                result['before'] = ds
                result['after'] = payload
                result['msg'] = "Datasource %s updated %s" % (data['name'], res['message'])
                result['changed'] = True
            else:
                raise GrafanaAPIException('Unable to update the datasource id %d : %s' % (ds['id'], info))
    else:
        # create
        r, info = fetch_url(module, '%s/api/datasources' % data['grafana_url'],
                            data=json.dumps(payload), headers=headers, method='POST')
        if info['status'] == 200:
            res = json.loads(r.read())
            result['msg'] = "Datasource %s created : %s" % (data['name'], res['message'])
            result['changed'] = True
            result['name'] = data['name']
            result['id'] = res['id']
        else:
            raise GrafanaAPIException('Unable to create the new datasource %s : %s - %s.' % (data['name'], info['status'], info))
    return result
def http_deploy(module, deployed):
    """Deploy an artifact to a JBoss/WildFly server over the HTTP management API.

    When ``deployed`` is True, compares the SHA1 of the local artifact
    with the hash of the existing deployment and re-uploads the content
    only if they differ. Otherwise uploads the content and registers a
    new deployment resource.

    :return: True when content was uploaded/registered, False when the
        existing deployment already matches the local file.
    """
    # Local imports for the Python 3 compatible hash handling below.
    import base64
    import binascii

    if deployed:
        data_dict = {
            'operation': 'read-resource',
            'address': {
                'deployment': module.params['deployment']
            },
            'include-runtime': True
        }
        headers = {'Content-Type': 'application/json'}
        data = json.dumps(data_dict)
        resp, info = fetch_url(
            module,
            'http://%s:%s/management' % (module.params['hostname'],
                                         module.params['port']),
            data=data, headers=headers)
        try:
            assert info['status'] == 200
        except AssertionError:
            module.fail_json(msg=info)
        resp_json = json.loads(resp.read())

        # Process existing deployment hash
        # FIX: generator.next() and str.decode('base64')/str.encode('hex')
        # are Python-2-only; use the next() builtin plus base64/binascii.
        deployment_hash_dict = next(
            key for key in resp_json['result']['content'] if 'hash' in key)
        deployment_hash_base64 = deployment_hash_dict['hash']['BYTES_VALUE']
        deployment_hash_hex = binascii.hexlify(
            base64.b64decode(deployment_hash_base64)).decode('ascii')
        if module.sha1(module.params['src']) != deployment_hash_hex:
            import requests
            # FIX: close the uploaded file deterministically.
            with open(module.params['src'], 'rb') as src_file:
                resp = requests.post(
                    'http://%s:%s/management/add-content' %
                    (module.params['hostname'], module.params['port']),
                    files={'file': src_file},
                    auth=requests.auth.HTTPDigestAuth(
                        module.params['url_username'],
                        module.params['url_password'])
                )
            if resp.status_code < 400:
                return True
            else:
                module.fail_json(msg={'status': 'HTTP %s %s' %
                                      (resp.status_code, resp.reason)})
    else:
        headers = {'Content-Type': 'application/json'}
        import requests
        auth = requests.auth.HTTPDigestAuth(module.params['url_username'],
                                            module.params['url_password'])
        # Upload the artifact content first; the management API returns
        # the content hash used to register the deployment below.
        with open(module.params['src'], 'rb') as src_file:
            resp = requests.post(
                'http://%s:%s/management/add-content' %
                (module.params['hostname'], module.params['port']),
                files={'file': src_file},
                auth=auth
            )
        try:
            assert resp.status_code == 200
        except AssertionError:
            module.fail_json(msg=resp.text)
        resp_json = resp.json()
        data_dict = {
            'operation': 'add',
            'address': [{
                'deployment': module.params['deployment']
            }],
            'enabled': True,
            'content': [{'hash': {'BYTES_VALUE': resp_json['result']['BYTES_VALUE']}}]
        }
        resp = requests.post(
            'http://%s:%s/management' % (module.params['hostname'],
                                         module.params['port']),
            json=data_dict, headers=headers, auth=auth
        )
        try:
            assert resp.status_code == 200
        except AssertionError:
            module.fail_json(msg=resp.text)
        return True

    # Deployed and hash already matches: nothing to do.
    return False
def intersight_call(self, http_method="", resource_path="", query_params=None, body=None, moid=None, name=None):
    """
    Invoke the Intersight API

    :param http_method: HTTP verb, one of GET/POST/PATCH/DELETE
    :param resource_path: intersight resource path e.g. '/ntp/Policies'
    :param query_params: dictionary object with query string parameters as key/value pairs
    :param body: dictionary object with intersight data
    :param moid: intersight object moid
    :param name: intersight object name (used to resolve a moid for PATCH/DELETE)
    :return: (response, info) pair from fetch_url
    """
    target_host = urlparse(self.host).netloc
    target_path = urlparse(self.host).path
    query_path = ""
    method = http_method.upper()
    bodyString = ""

    # Verify an accepted HTTP verb was chosen
    if method not in ['GET', 'POST', 'PATCH', 'DELETE']:
        raise ValueError(
            'Please select a valid HTTP verb (GET/POST/PATCH/DELETE)')

    # Verify the resource path isn't empty & is a valid <str> object
    # FIX: original tested `not (resource_path, str)` — a non-empty tuple,
    # so the check never fired; isinstance() is what was intended.
    if resource_path != "" and not isinstance(resource_path, str):
        raise TypeError(
            'The *resource_path* value is required and must be of type "<str>"'
        )

    # Verify the query parameters isn't empty & is a valid <dict> object
    if query_params is not None and not isinstance(query_params, dict):
        raise TypeError(
            'The *query_params* value must be of type "<dict>"')

    # Verify the MOID is not null & of proper length
    if moid is not None and len(moid.encode('utf-8')) != 24:
        raise ValueError('Invalid *moid* value!')

    # Check for query_params, encode, and concatenate onto URL
    if query_params:
        query_path = "?" + urlencode(query_params)

    # Handle PATCH/DELETE by Object "name" instead of "moid"
    if method in ('PATCH', 'DELETE'):
        if moid is None:
            if name is not None:
                if isinstance(name, str):
                    moid = self.get_moid_by_name(resource_path, name)
                else:
                    raise TypeError(
                        'The *name* value must be of type "<str>"')
            else:
                raise ValueError(
                    'Must set either *moid* or *name* with "PATCH/DELETE!"'
                )

    # Check for moid and concatenate onto URL
    if moid is not None:
        resource_path += "/" + moid

    # Check for GET request to properly form body
    if method != "GET":
        bodyString = json.dumps(body)

    # Concatenate URLs for headers
    target_url = self.host + resource_path + query_path
    request_target = method.lower() + " " + target_path + resource_path + query_path

    # Get the current GMT Date/Time
    cdate = get_gmt_date()

    # Generate the body digest
    body_digest = get_sha256_digest(bodyString)
    b64_body_digest = b64encode(body_digest.digest())

    # Generate the authorization header
    auth_header = {
        'Host': target_host,
        'Date': cdate,
        'Digest': "SHA-256=" + b64_body_digest.decode('ascii'),
    }

    string_to_sign = prepare_str_to_sign(request_target, auth_header)
    b64_signed_msg = self.get_sig_b64encode(string_to_sign)
    auth_header = self.get_auth_header(auth_header, b64_signed_msg)

    # Generate the HTTP requests header
    request_header = {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'Host': '{0}'.format(target_host),
        'Date': '{0}'.format(cdate),
        'Digest': 'SHA-256={0}'.format(b64_body_digest.decode('ascii')),
        'Authorization': '{0}'.format(auth_header),
    }

    response, info = fetch_url(self.module,
                               target_url,
                               data=bodyString,
                               headers=request_header,
                               method=method,
                               use_proxy=self.module.params['use_proxy'])

    return response, info
def main(): module = AnsibleModule(argument_spec=dict( subscription=dict(required=True), token=dict(required=True, no_log=True), room=dict(required=True), msg=dict(required=True), notify=dict(required=False, choices=[ "56k", "bell", "bezos", "bueller", "clowntown", "cottoneyejoe", "crickets", "dadgummit", "dangerzone", "danielsan", "deeper", "drama", "greatjob", "greyjoy", "guarantee", "heygirl", "horn", "horror", "inconceivable", "live", "loggins", "makeitso", "noooo", "nyan", "ohmy", "ohyeah", "pushit", "rimshot", "rollout", "rumble", "sax", "secret", "sexyback", "story", "tada", "tmyk", "trololo", "trombone", "unix", "vuvuzela", "what", "whoomp", "yeah", "yodel" ]), ), supports_check_mode=False) subscription = module.params["subscription"] token = module.params["token"] room = module.params["room"] msg = module.params["msg"] notify = module.params["notify"] URI = "https://%s.campfirenow.com" % subscription NSTR = "<message><type>SoundMessage</type><body>%s</body></message>" MSTR = "<message><body>%s</body></message>" AGENT = "Ansible/1.2" # Hack to add basic auth username and password the way fetch_url expects module.params['url_username'] = token module.params['url_password'] = '******' target_url = '%s/room/%s/speak.xml' % (URI, room) headers = {'Content-Type': 'application/xml', 'User-agent': AGENT} # Send some audible notification if requested if notify: response, info = fetch_url(module, target_url, data=NSTR % html_escape(notify), headers=headers) if info['status'] not in [200, 201]: module.fail_json(msg="unable to send msg: '%s', campfire api" " returned error code: '%s'" % (notify, info['status'])) # Send the message response, info = fetch_url(module, target_url, data=MSTR % html_escape(msg), headers=headers) if info['status'] not in [200, 201]: module.fail_json(msg="unable to send msg: '%s', campfire api" " returned error code: '%s'" % (msg, info['status'])) module.exit_json(changed=True, room=room, msg=msg, notify=notify)
def grafana_switch_organisation(module, grafana_url, org_id, headers): r, info = fetch_url(module, '%s/api/user/using/%d' % (grafana_url, org_id), headers=headers, method='POST') if info['status'] != 200: raise GrafanaAPIException('Unable to switch to organization %s : %s' % (org_id, info))
def update(module, base_url, headers, stream_id, title, description, remove_matches_from_default_stream, matching_type, rules, index_set_id): url = "/".join([base_url, stream_id]) payload = {} response, info = fetch_url(module=module, url=url, headers=json.loads(headers), method='GET') if info['status'] != 200: module.fail_json(msg="Fail: %s" % ("Status: " + str(info['msg']) + ", Message: " + str(info['body']))) try: content = to_text(response.read(), errors='surrogate_or_strict') payload_current = json.loads(content) except AttributeError: content = info.pop('body', '') if title is not None: payload['title'] = title else: payload['title'] = payload_current['title'] if description is not None: payload['description'] = description else: payload['description'] = payload_current['description'] if remove_matches_from_default_stream is not None: payload[ 'remove_matches_from_default_stream'] = remove_matches_from_default_stream else: payload['remove_matches_from_default_stream'] = payload_current[ 'remove_matches_from_default_stream'] if matching_type is not None: payload['matching_type'] = matching_type else: payload['matching_type'] = payload_current['matching_type'] if rules is not None: payload['rules'] = rules else: payload['rules'] = payload_current['rules'] if index_set_id is not None: payload['index_set_id'] = index_set_id else: payload['index_set_id'] = payload_current['index_set_id'] response, info = fetch_url(module=module, url=url, headers=json.loads(headers), method='PUT', data=module.jsonify(payload)) if info['status'] != 200: module.fail_json(msg="Fail: %s" % ("Status: " + str(info['msg']) + ", Message: " + str(info['body']))) try: content = to_text(response.read(), errors='surrogate_or_strict') except AttributeError: content = info.pop('body', '') return info['status'], info['msg'], content, url
def api_query(self, path="/", method="GET", data=None): url = self.api_config['api_endpoint'] + path if data: data_encoded = dict() data_list = "" for k, v in data.items(): if isinstance(v, list): for s in v: try: data_list += '&%s[]=%s' % (k, urllib.quote(s)) except AttributeError: data_list += '&%s[]=%s' % (k, urllib.parse.quote(s)) elif v is not None: data_encoded[k] = v try: data = urllib.urlencode(data_encoded) + data_list except AttributeError: data = urllib.parse.urlencode(data_encoded) + data_list for s in range(0, self.api_config['api_retries']): response, info = fetch_url( module=self.module, url=url, data=data, method=method, headers=self.headers, timeout=self.api_config['api_timeout'], ) # Did we hit the rate limit? if info.get('status') and info.get('status') != 503: break # Vultr has a rate limiting requests per second, try to be polite time.sleep(1) else: self.fail_json(msg="Reached API retries limit %s for URL %s, method %s with data %s. Returned %s, with body: %s %s" % ( self.api_config['api_retries'], url, method, data, info['status'], info['msg'], info.get('body') )) if info.get('status') != 200: self.fail_json(msg="URL %s, method %s with data %s. Returned %s, with body: %s %s" % ( url, method, data, info['status'], info['msg'], info.get('body') )) res = response.read() if not res: return {} try: return self.module.from_json(to_text(res)) except ValueError as e: self.module.fail_json(msg="Could not process response into json: %s" % e)
def get_request(self, uri, parse_json_result=True, headers=None, get_only=False,
                fail_on_error=True, error_msg=None, expected_status_codes=None):
    '''
    Perform a GET-like request. Will try POST-as-GET for ACMEv2, with
    fallback to GET if server replies with a status code of 405.

    :param uri: URI to request.
    :param parse_json_result: decode the body as JSON when the reply's
        Content-Type is application/json; otherwise return raw content.
    :param headers: extra headers used on the unauthenticated GET path.
    :param get_only: skip the POST-as-GET attempt entirely.
    :param fail_on_error: raise ACMEProtocolException when _is_failed()
        deems the response an error.
    :param error_msg: message passed to ACMEProtocolException on failure.
    :param expected_status_codes: codes _is_failed() accepts as success.
    :returns: tuple (result, info); result is the parsed JSON dict or the
        raw content.
    '''
    if not get_only and self.version != 1:
        # Try POST-as-GET
        content, info = self.send_signed_request(uri, None, parse_json_result=False, fail_on_error=False)
        if info['status'] == 405:
            # Instead, do unauthenticated GET
            get_only = True
    else:
        # Do unauthenticated GET
        get_only = True

    if get_only:
        # Perform unauthenticated GET
        resp, info = fetch_url(self.module, uri, method='GET', headers=headers)

        _assert_fetch_url_success(self.module, resp, info)

        try:
            content = resp.read()
        except AttributeError:
            # No readable response object; fetch_url stashed the body in info.
            content = info.pop('body', None)

    # Process result
    parsed_json_result = False
    if parse_json_result:
        result = {}
        if content:
            if info['content-type'].startswith('application/json'):
                try:
                    result = self.module.from_json(content.decode('utf8'))
                    parsed_json_result = True
                except ValueError:
                    raise NetworkException("Failed to parse the ACME response: {0} {1}".format(uri, content))
            else:
                result = content
    else:
        result = content

    if fail_on_error and _is_failed(info, expected_status_codes=expected_status_codes):
        raise ACMEProtocolException(self.module, msg=error_msg, info=info, content=content,
                                    content_json=result if parsed_json_result else None)
    return result, info
def send_request(self, commands, output='text', check_status=True, return_error=False, opts=None):
    """Send commands to the device over NX-API and collect the outputs.

    Non-config commands are batched at most 10 per request; config
    commands go in a single request. Returns the list of command bodies
    (or raw status codes when opts['ignore_timeout'] matches a failure).
    """
    # only 10 show commands can be encoded in each request
    # messages sent to the remote device
    if opts is None:
        opts = {}

    if output != 'config':
        commands = collections.deque(to_list(commands))
        stack = list()
        requests = list()

        while commands:
            stack.append(commands.popleft())
            if len(stack) == 10:
                body = self._request_builder(stack, output)
                data = self._module.jsonify(body)
                requests.append(data)
                stack = list()

        if stack:
            body = self._request_builder(stack, output)
            data = self._module.jsonify(body)
            requests.append(data)

    else:
        body = self._request_builder(commands, 'config')
        requests = [self._module.jsonify(body)]

    headers = {'Content-Type': 'application/json'}
    result = list()
    timeout = self._module.params['timeout']
    use_proxy = self._module.params['provider']['use_proxy']

    for req in requests:
        if self._nxapi_auth:
            headers['Cookie'] = self._nxapi_auth

        # BUGFIX: keep the fetch_url result info in its own variable. The
        # previous code rebound `headers` to the response-info dict, so
        # every request after the first sent response metadata (status,
        # msg, url, ...) to the device as HTTP headers.
        response, info = fetch_url(
            self._module, self._url, data=req, headers=headers,
            timeout=timeout, method='POST', use_proxy=use_proxy
        )
        self._nxapi_auth = info.get('set-cookie')

        # Optionally tolerate timeouts / 5xx failures by returning the raw
        # status instead of failing the module.
        if opts.get('ignore_timeout') and re.search(r'(-1|5\d\d)', str(info['status'])):
            result.append(info['status'])
            return result
        elif info['status'] != 200:
            self._error(**info)

        try:
            response = self._module.from_json(response.read())
        except ValueError:
            self._module.fail_json(msg='unable to parse response')

        if response['ins_api'].get('outputs'):
            output = response['ins_api']['outputs']['output']
            for item in to_list(output):
                if check_status is True and item['code'] != '200':
                    if return_error:
                        result.append(item)
                    else:
                        self._error(output=output, **item)
                elif 'body' in item:
                    result.append(item['body'])
                # else:
                    # error in command but since check_status is disabled
                    # silently drop it.
                    # result.append(item['msg'])

    return result
def enforce_state(module, params):
    """ Add or remove key.

    Downloads the key when it is a URL, parses new and existing keys,
    reconciles them according to state/exclusive, and (re)writes the
    authorized_keys file when anything changed. Returns the updated
    params dict (with 'changed'/'diff' set when a write occurred).
    """
    user = params["user"]
    key = params["key"]
    path = params.get("path", None)
    manage_dir = params.get("manage_dir", True)
    state = params.get("state", "present")
    key_options = params.get("key_options", None)
    exclusive = params.get("exclusive", False)
    error_msg = "Error getting key from: %s"

    # if the key is a url, request it and use it as key source
    if key.startswith("http"):
        try:
            resp, info = fetch_url(module, key)
            if info['status'] != 200:
                module.fail_json(msg=error_msg % key)
            else:
                key = resp.read()
        except Exception:
            module.fail_json(msg=error_msg % key)

    # extract individual keys into an array, skipping blank lines and comments
    new_keys = [s for s in key.splitlines() if s and not s.startswith('#')]

    # check current state -- just get the filename, don't create file
    do_write = False
    params["keyfile"] = keyfile(module, user, do_write, path, manage_dir)
    existing_content = readfile(params["keyfile"])
    existing_keys = parsekeys(module, existing_content)

    # Add a place holder for keys that should exist in the state=present and
    # exclusive=true case
    keys_to_exist = []

    # we will order any non exclusive new keys higher than all the existing keys,
    # resulting in the new keys being written to the key file after existing keys, but
    # in the order of new_keys
    max_rank_of_existing_keys = len(existing_keys)

    # Check our new keys, if any of them exist we'll continue.
    for rank_index, new_key in enumerate(new_keys):
        parsed_new_key = parsekey(module, new_key, rank=rank_index)

        if not parsed_new_key:
            module.fail_json(msg="invalid key specified: %s" % new_key)

        if key_options is not None:
            parsed_options = parseoptions(module, key_options)
            # rank here is the rank in the provided new keys, which may be unrelated to rank in existing_keys
            parsed_new_key = (parsed_new_key[0], parsed_new_key[1], parsed_options, parsed_new_key[3], parsed_new_key[4])

        matched = False
        non_matching_keys = []

        if parsed_new_key[0] in existing_keys:
            # Then we check if everything (except the rank at index 4) matches, including
            # the key type and options. If not, we append this
            # existing key to the non-matching list
            # We only want it to match everything when the state
            # is present
            if parsed_new_key[:4] != existing_keys[parsed_new_key[0]][:4] and state == "present":
                non_matching_keys.append(existing_keys[parsed_new_key[0]])
            else:
                matched = True

        # handle idempotent state=present
        if state == "present":
            keys_to_exist.append(parsed_new_key[0])
            if len(non_matching_keys) > 0:
                # Drop existing entries whose type/options differ so the new
                # definition replaces them.
                for non_matching_key in non_matching_keys:
                    if non_matching_key[0] in existing_keys:
                        del existing_keys[non_matching_key[0]]
                        do_write = True

            # new key that didn't exist before. Where should it go in the ordering?
            if not matched:
                # We want the new key to be after existing keys if not exclusive (rank > max_rank_of_existing_keys)
                total_rank = max_rank_of_existing_keys + parsed_new_key[4]
                # replace existing key tuple with new parsed key with its total rank
                existing_keys[parsed_new_key[0]] = (parsed_new_key[0], parsed_new_key[1], parsed_new_key[2], parsed_new_key[3], total_rank)
                do_write = True

        elif state == "absent":
            if not matched:
                continue
            del existing_keys[parsed_new_key[0]]
            do_write = True

    # remove all other keys to honor exclusive
    # for 'exclusive', make sure keys are written in the order the new keys were
    if state == "present" and exclusive:
        to_remove = frozenset(existing_keys).difference(keys_to_exist)
        for key in to_remove:
            del existing_keys[key]
            do_write = True

    if do_write:
        filename = keyfile(module, user, do_write, path, manage_dir)
        new_content = serialize(existing_keys)

        diff = {
            'before_header': params['keyfile'],
            'after_header': filename,
            'before': existing_content,
            'after': new_content,
        }

        if module.check_mode:
            module.exit_json(changed=True, diff=diff)

        writefile(module, filename, new_content)
        params['changed'] = True
        params['diff'] = diff
    else:
        if module.check_mode:
            module.exit_json(changed=False)

    return params
def url_get(module, url, dest, use_proxy, last_mod_time, force, timeout=10, headers=None, tmp_dest=''):
    """
    Download data from the url and store in a temporary file.

    Return (tempfile, info about the request)
    """
    # In check mode we only probe the URL, never transfer the body.
    http_method = 'HEAD' if module.check_mode else 'GET'

    rsp, info = fetch_url(module, url, use_proxy=use_proxy, force=force,
                          last_mod_time=last_mod_time, timeout=timeout,
                          headers=headers, method=http_method)
    status = info['status']

    if status == 304:
        # Server says nothing changed since last_mod_time.
        module.exit_json(url=url, dest=dest, changed=False, msg=info.get('msg', ''))

    # Exceptions in fetch_url may result in a status -1, the ensures a proper error to the user in all cases
    if status == -1:
        module.fail_json(msg=info['msg'], url=url, dest=dest)

    # file:/ URLs and successful FTP transfers don't report HTTP 200.
    is_local_file = url.startswith('file:/')
    is_ftp_ok = url.startswith('ftp:/') and info.get('msg', '').startswith('OK')
    if status != 200 and not is_local_file and not is_ftp_ok:
        module.fail_json(msg="Request failed", status_code=status, response=info['msg'], url=url, dest=dest)

    # create a temporary file and copy content to do checksum-based replacement
    if not tmp_dest:
        tmp_dest = module.tmpdir
    elif not os.path.isdir(tmp_dest):
        # tmp_dest should be an existing dir
        if os.path.exists(tmp_dest):
            module.fail_json(msg="%s is a file but should be a directory." % tmp_dest)
        module.fail_json(msg="%s directory does not exist." % tmp_dest)

    fd, tempname = tempfile.mkstemp(dir=tmp_dest)

    f = os.fdopen(fd, 'wb')
    try:
        shutil.copyfileobj(rsp, f)
    except Exception as e:
        os.remove(tempname)
        module.fail_json(msg="failed to create temporary content file: %s" % to_native(e), exception=traceback.format_exc())
    f.close()
    rsp.close()
    return tempname, info
def main():
    """Entry point for the aci_rest-style module: push or fetch raw ACI
    managed objects by path, supporting both .json and .xml payloads."""
    argument_spec = aci_argument_spec
    argument_spec.update(
        path=dict(type='str', required=True, aliases=['uri']),
        method=dict(type='str', default='get', choices=['delete', 'get', 'post'], aliases=['action']),
        src=dict(type='path', aliases=['config_file']),
        content=dict(type='str'),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        mutually_exclusive=[['content', 'src']],
        supports_check_mode=True,
    )

    path = module.params['path']
    content = module.params['content']
    src = module.params['src']
    method = module.params['method']
    timeout = module.params['timeout']

    # Report missing file
    file_exists = False
    if src:
        if os.path.isfile(src):
            file_exists = True
        else:
            module.fail_json(msg="Cannot find/access src '%s'" % src)

    # Find request type from the path suffix
    if path.find('.xml') != -1:
        rest_type = 'xml'
        if not HAS_LXML_ETREE:
            module.fail_json(msg='The lxml python library is missing, or lacks etree support.')
        if not HAS_XMLJSON_COBRA:
            module.fail_json(msg='The xmljson python library is missing, or lacks cobra support.')
    elif path.find('.json') != -1:
        rest_type = 'json'
    else:
        module.fail_json(msg='Failed to find REST API content type (neither .xml nor .json).')

    aci = ACIModule(module)

    if method == 'get':
        aci.request(path)
        module.exit_json(**aci.result)
    elif module.check_mode:
        # In check_mode we assume it works, but we don't actually perform the requested change
        # TODO: Could we turn this request in a GET instead ?
        aci.result['changed'] = True
        module.exit_json(response='OK (Check mode)', status=200, **aci.result)

    # Prepare request data. Default to no payload when neither content nor
    # src was supplied; previously `payload` was left unbound in that case
    # and the fetch_url call below raised a NameError.
    payload = None
    if content:
        # We include the payload as it may be templated
        payload = content
    elif file_exists:
        with open(src, 'r') as config_object:
            # TODO: Would be nice to template this, requires action-plugin
            payload = config_object.read()

    # Perform actual request using auth cookie (Same as aci_request, but also supports XML)
    url = '%(protocol)s://%(hostname)s/' % aci.params + path.lstrip('/')

    resp, info = fetch_url(module, url, data=payload, method=method.upper(), timeout=timeout, headers=aci.headers)
    aci.result['response'] = info['msg']
    aci.result['status'] = info['status']

    # Report failure
    if info['status'] != 200:
        try:
            aci_response(aci.result, info['body'], rest_type)
            module.fail_json(msg='Request failed: %(error_code)s %(error_text)s' % aci.result, **aci.result)
        except KeyError:
            # No parseable error body was returned.
            module.fail_json(msg='Request failed for %(url)s. %(msg)s' % info, **aci.result)

    aci_response(aci.result, resp.read(), rest_type)

    # Report success
    module.exit_json(**aci.result)
def send_signed_request(self, url, payload, key_data=None, jws_header=None, parse_json_result=True, encode_payload=True):
    '''
    Sends a JWS signed HTTP POST request to the ACME server and returns
    the response as dictionary (or raw content when parse_json_result is
    False or the reply is not JSON).
    https://tools.ietf.org/html/rfc8555#section-6.2

    If payload is None, a POST-as-GET is performed.
    (https://tools.ietf.org/html/rfc8555#section-6.3)
    '''
    key_data = key_data or self.key_data
    jws_header = jws_header or self.jws_header
    failed_tries = 0
    while True:
        protected = copy.deepcopy(jws_header)
        protected["nonce"] = self.directory.get_nonce()
        if self.version != 1:
            protected["url"] = url

        self._log('URL', url)
        self._log('protected', protected)
        self._log('payload', payload)
        data = self.sign_request(protected, payload, key_data, encode_payload=encode_payload)
        if self.version == 1:
            # ACME v1: fields present in the protected header must be
            # removed from the unprotected header.
            data["header"] = jws_header.copy()
            for k in protected:
                data["header"].pop(k, None)
        self._log('signed request', data)
        data = self.module.jsonify(data)

        headers = {
            'Content-Type': 'application/jose+json',
        }
        resp, info = fetch_url(self.module, url, data=data, headers=headers, method='POST')
        # BUGFIX: _assert_fetch_url_success takes the module as its first
        # argument (as in get_request); it was called here without it.
        _assert_fetch_url_success(self.module, resp, info)
        result = {}
        try:
            content = resp.read()
        except AttributeError:
            # No readable response object; fetch_url stashed the body in info.
            content = info.pop('body', None)

        if content or not parse_json_result:
            if (parse_json_result and info['content-type'].startswith('application/json')) or 400 <= info['status'] < 600:
                try:
                    decoded_result = self.module.from_json(content.decode('utf8'))
                    self._log('parsed result', decoded_result)
                    # In case of badNonce error, try again (up to 5 times)
                    # (https://tools.ietf.org/html/rfc8555#section-6.7)
                    if (400 <= info['status'] < 600 and
                            decoded_result.get('type') == 'urn:ietf:params:acme:error:badNonce' and
                            failed_tries <= 5):
                        failed_tries += 1
                        continue
                    if parse_json_result:
                        result = decoded_result
                    else:
                        result = content
                except ValueError:
                    raise ModuleFailException("Failed to parse the ACME response: {0} {1}".format(url, content))
            else:
                result = content

        return result, info
def main():
    """Entry point for the riak module.

    Fetches node stats over HTTP, runs the requested riak/riak-admin
    cluster command (ping/kv_test/join/plan/commit), and optionally waits
    for handoffs, a service, or ring agreement before exiting.
    """
    module = AnsibleModule(
        argument_spec=dict(
            command=dict(required=False, default=None, choices=['ping', 'kv_test', 'join', 'plan', 'commit']),
            config_dir=dict(default='/etc/riak', type='path'),
            http_conn=dict(required=False, default='127.0.0.1:8098'),
            target_node=dict(default='[email protected]', required=False),
            wait_for_handoffs=dict(default=False, type='int'),
            wait_for_ring=dict(default=False, type='int'),
            wait_for_service=dict(required=False, default=None, choices=['kv']),
            validate_certs=dict(default='yes', type='bool'))
    )

    command = module.params.get('command')
    http_conn = module.params.get('http_conn')
    target_node = module.params.get('target_node')
    wait_for_handoffs = module.params.get('wait_for_handoffs')
    wait_for_ring = module.params.get('wait_for_ring')
    wait_for_service = module.params.get('wait_for_service')

    # make sure riak commands are on the path
    riak_bin = module.get_bin_path('riak')
    riak_admin_bin = module.get_bin_path('riak-admin')

    # Poll the node's HTTP stats endpoint for up to two minutes.
    timeout = time.time() + 120
    while True:
        if time.time() > timeout:
            module.fail_json(msg='Timeout, could not fetch Riak stats.')
        (response, info) = fetch_url(module, 'http://%s/stats' % (http_conn), force=True, timeout=5)
        if info['status'] == 200:
            stats_raw = response.read()
            break
        time.sleep(5)

    # here we attempt to load those stats,
    try:
        stats = json.loads(stats_raw)
    except Exception:
        module.fail_json(msg='Could not parse Riak stats.')

    node_name = stats['nodename']
    nodes = stats['ring_members']
    ring_size = stats['ring_creation_size']
    rc, out, err = module.run_command([riak_bin, 'version'])
    version = out.strip()

    result = dict(node_name=node_name,
                  nodes=nodes,
                  ring_size=ring_size,
                  version=version)

    if command == 'ping':
        cmd = '%s ping %s' % (riak_bin, target_node)
        rc, out, err = module.run_command(cmd)
        if rc == 0:
            result['ping'] = out
        else:
            module.fail_json(msg=out)

    elif command == 'kv_test':
        cmd = '%s test' % riak_admin_bin
        rc, out, err = module.run_command(cmd)
        if rc == 0:
            result['kv_test'] = out
        else:
            module.fail_json(msg=out)

    elif command == 'join':
        # Joining twice is not an error -- report it as a no-op.
        if nodes.count(node_name) == 1 and len(nodes) > 1:
            result['join'] = 'Node is already in cluster or staged to be in cluster.'
        else:
            cmd = '%s cluster join %s' % (riak_admin_bin, target_node)
            rc, out, err = module.run_command(cmd)
            if rc == 0:
                result['join'] = out
                result['changed'] = True
            else:
                module.fail_json(msg=out)

    elif command == 'plan':
        cmd = '%s cluster plan' % riak_admin_bin
        rc, out, err = module.run_command(cmd)
        if rc == 0:
            result['plan'] = out
            if 'Staged Changes' in out:
                result['changed'] = True
        else:
            module.fail_json(msg=out)

    elif command == 'commit':
        cmd = '%s cluster commit' % riak_admin_bin
        rc, out, err = module.run_command(cmd)
        if rc == 0:
            result['commit'] = out
            result['changed'] = True
        else:
            module.fail_json(msg=out)

    # this could take a while, recommend to run in async mode
    if wait_for_handoffs:
        timeout = time.time() + wait_for_handoffs
        while True:
            cmd = '%s transfers' % riak_admin_bin
            rc, out, err = module.run_command(cmd)
            if 'No transfers active' in out:
                result['handoffs'] = 'No transfers active.'
                break
            time.sleep(10)
            if time.time() > timeout:
                module.fail_json(msg='Timeout waiting for handoffs.')

    if wait_for_service:
        cmd = [riak_admin_bin, 'wait_for_service', 'riak_%s' % wait_for_service, node_name]
        rc, out, err = module.run_command(cmd)
        result['service'] = out

    if wait_for_ring:
        timeout = time.time() + wait_for_ring
        while True:
            if ring_check(module, riak_admin_bin):
                break
            time.sleep(10)
            if time.time() > timeout:
                module.fail_json(msg='Timeout waiting for nodes to agree on ring.')

    result['ring_ready'] = ring_check(module, riak_admin_bin)

    module.exit_json(**result)
def grafana_create_datasource(module, data):
    """Create a Grafana datasource, or update it if it already exists.

    Builds the API payload from `data` (auth, TLS and per-type settings),
    compares it with the existing datasource and only issues a PUT/POST
    when needed. Returns a result dict with 'changed', 'msg', 'name', 'id'
    (and 'before'/'after' on update). Raises GrafanaAPIException on API
    errors.
    """
    # define data payload for grafana API
    payload = {'orgId': data['org_id'],
               'name': data['name'],
               'type': data['ds_type'],
               'access': data['access'],
               'url': data['url'],
               'database': data['database'],
               'withCredentials': data['with_credentials'],
               'isDefault': data['is_default'],
               'user': data['user'],
               'password': data['password']}

    # define basic auth
    if 'basic_auth_user' in data and data['basic_auth_user'] and 'basic_auth_password' in data and data['basic_auth_password']:
        payload['basicAuth'] = True
        payload['basicAuthUser'] = data['basic_auth_user']
        payload['basicAuthPassword'] = data['basic_auth_password']
    else:
        payload['basicAuth'] = False

    # define tls auth
    json_data = {}
    if data.get('tls_client_cert') and data.get('tls_client_key'):
        json_data['tlsAuth'] = True
        if data.get('tls_ca_cert'):
            payload['secureJsonData'] = {
                'tlsCACert': data['tls_ca_cert'],
                'tlsClientCert': data['tls_client_cert'],
                'tlsClientKey': data['tls_client_key']
            }
            json_data['tlsAuthWithCACert'] = True
        else:
            payload['secureJsonData'] = {
                'tlsClientCert': data['tls_client_cert'],
                'tlsClientKey': data['tls_client_key']
            }
    else:
        json_data['tlsAuth'] = False
        json_data['tlsAuthWithCACert'] = False
        if data.get('tls_ca_cert'):
            payload['secureJsonData'] = {
                'tlsCACert': data['tls_ca_cert']
            }

    if data.get('tls_skip_verify'):
        json_data['tlsSkipVerify'] = True

    # datasource type related parameters
    if data['ds_type'] == 'elasticsearch':
        json_data['esVersion'] = data['es_version']
        json_data['timeField'] = data['time_field']
        if data.get('interval'):
            json_data['interval'] = data['interval']
        if data['es_version'] >= 56:
            json_data['maxConcurrentShardRequests'] = data['max_concurrent_shard_requests']

    if data['ds_type'] == 'elasticsearch' or data['ds_type'] == 'influxdb':
        if data.get('time_interval'):
            json_data['timeInterval'] = data['time_interval']

    if data['ds_type'] == 'opentsdb':
        json_data['tsdbVersion'] = data['tsdb_version']
        if data['tsdb_resolution'] == 'second':
            json_data['tsdbResolution'] = 1
        else:
            json_data['tsdbResolution'] = 2

    if data['ds_type'] == 'postgres':
        json_data['sslmode'] = data['sslmode']
        if data.get('password'):
            payload['secureJsonData'] = {'password': data.get('password')}

    if data['ds_type'] == 'alexanderzobnin-zabbix-datasource':
        if data.get('trends'):
            json_data['trends'] = True

    if data['ds_type'] == 'cloudwatch':
        # BUGFIX: this previously tested the misspelled key
        # 'aws_credentials_profle', so the credentials profile was never
        # applied to the payload.
        if data.get('aws_credentials_profile'):
            payload['database'] = data.get('aws_credentials_profile')

        json_data['authType'] = data['aws_auth_type']
        json_data['defaultRegion'] = data['aws_default_region']

        if data.get('aws_custom_metrics_namespaces'):
            json_data['customMetricsNamespaces'] = data.get('aws_custom_metrics_namespaces')
        if data.get('aws_assume_role_arn'):
            json_data['assumeRoleArn'] = data.get('aws_assume_role_arn')
        if data.get('aws_access_key') and data.get('aws_secret_key'):
            payload['secureJsonData'] = {'accessKey': data.get('aws_access_key'),
                                         'secretKey': data.get('aws_secret_key')}

    payload['jsonData'] = json_data

    # define http header
    headers = grafana_headers(module, data)

    # test if datasource already exists
    datasource_exists, ds = grafana_datasource_exists(module, data['grafana_url'], data['name'], headers=headers)

    result = {}
    if datasource_exists is True:
        # Strip server-managed fields so the comparison against our payload
        # only looks at fields we control.
        del ds['typeLogoUrl']
        if ds.get('version'):
            del ds['version']
        if ds.get('readOnly'):
            del ds['readOnly']
        if ds['basicAuth'] is False:
            del ds['basicAuthUser']
            del ds['basicAuthPassword']
        if 'jsonData' in ds:
            if 'tlsAuth' in ds['jsonData'] and ds['jsonData']['tlsAuth'] is False:
                del ds['secureJsonFields']
            if 'tlsAuth' not in ds['jsonData']:
                del ds['secureJsonFields']
        payload['id'] = ds['id']
        if ds == payload:
            # unchanged
            result['name'] = data['name']
            result['id'] = ds['id']
            result['msg'] = "Datasource %s unchanged." % data['name']
            result['changed'] = False
        else:
            # update
            r, info = fetch_url(module, '%s/api/datasources/%d' % (data['grafana_url'], ds['id']),
                                data=json.dumps(payload), headers=headers, method='PUT')
            if info['status'] == 200:
                res = json.loads(to_text(r.read(), errors='surrogate_or_strict'))
                result['name'] = data['name']
                result['id'] = ds['id']
                result['before'] = ds
                result['after'] = payload
                result['msg'] = "Datasource %s updated %s" % (data['name'], res['message'])
                result['changed'] = True
            else:
                raise GrafanaAPIException('Unable to update the datasource id %d : %s' % (ds['id'], info))
    else:
        # create
        r, info = fetch_url(module, '%s/api/datasources' % data['grafana_url'],
                            data=json.dumps(payload), headers=headers, method='POST')
        if info['status'] == 200:
            res = json.loads(to_text(r.read(), errors='surrogate_or_strict'))
            result['msg'] = "Datasource %s created : %s" % (data['name'], res['message'])
            result['changed'] = True
            result['name'] = data['name']
            result['id'] = res['id']
        else:
            raise GrafanaAPIException('Unable to create the new datasource %s : %s - %s.' % (data['name'], info['status'], info))

    return result
def request(self, path, method=None, data=None, qs=None, api_version="v1"):
    ''' Generic HTTP method for MSO requests.

    Routes through the httpapi connection when a socket path is available,
    otherwise calls fetch_url directly. Returns the decoded JSON body on
    success, {} for 204 / DELETE-404 / PATCH-with-no-data, and fails the
    module on HTTP errors.
    '''
    self.path = path

    if method is not None:
        self.method = method

    # If we PATCH with empty operations, return
    if method == 'PATCH' and not data:
        return {}

    # if method in ['PATCH', 'PUT']:
    #     if qs is not None:
    #         qs['enableVersionCheck'] = 'true'
    #     else:
    #         qs = dict(enableVersionCheck='true')
    if method in ['PATCH']:
        # Skip server-side validation for PATCH requests.
        if qs is not None:
            qs['validate'] = 'false'
        else:
            qs = dict(validate='false')

    resp = None
    if self.module._socket_path:
        # Connection-plugin (httpapi) path.
        self.connection.set_params(self.params)
        if api_version is not None:
            uri = '/mso/api/{0}/{1}'.format(api_version, self.path)
        else:
            uri = self.path
        if qs is not None:
            uri = uri + update_qs(qs)
        try:
            info = self.connection.send_request(method, uri, json.dumps(data))
            self.url = info.get('url')
            info.pop('date')
        except Exception as e:
            # The connection plugin raises with a JSON-encoded error body
            # when possible; fall back to wrapping the raw message.
            try:
                error_obj = json.loads(to_text(e))
            except Exception:
                error_obj = dict(error=dict(code=-1, message="Unable to parse error output as JSON. Raw error message: {0}".format(e), exception=to_text(e)))
                pass
            self.fail_json(msg=error_obj['error']['message'])
    else:
        # Direct fetch_url path.
        if api_version is not None:
            self.url = '{0}api/{1}/{2}'.format(self.base_only_uri, api_version, self.path.lstrip('/'))
        else:
            self.url = '{0}{1}'.format(self.base_only_uri, self.path.lstrip('/'))
        if qs is not None:
            self.url = self.url + update_qs(qs)
        resp, info = fetch_url(self.module,
                               self.url,
                               headers=self.headers,
                               data=json.dumps(data),
                               method=self.method,
                               timeout=self.params.get('timeout'),
                               use_proxy=self.params.get('use_proxy'))

    self.response = info.get('msg')
    self.status = info.get('status', -1)

    # Get change status from HTTP headers
    if 'modified' in info:
        self.has_modified = True
        if info.get('modified') == 'false':
            self.result['changed'] = False
        elif info.get('modified') == 'true':
            self.result['changed'] = True

    # 200: OK, 201: Created, 202: Accepted
    if self.status in (200, 201, 202):
        try:
            output = resp.read()
            if output:
                try:
                    return json.loads(output)
                except Exception as e:
                    # Body present but not JSON; expose it as 'raw'.
                    self.error = dict(code=-1, message="Unable to parse output as JSON, see 'raw' output. {0}".format(e))
                    self.result['raw'] = output
                    return
        except AttributeError:
            # No response object (httpapi path); body lives in info.
            return info.get('body')

    # 204: No Content
    elif self.status == 204:
        return {}

    # 404: Not Found
    elif self.method == 'DELETE' and self.status == 404:
        return {}

    # 400: Bad Request, 401: Unauthorized, 403: Forbidden,
    # 405: Method Not Allowed, 406: Not Acceptable
    # 500: Internal Server Error, 501: Not Implemented
    elif self.status >= 400:
        self.result['status'] = self.status
        body = info.get('body')
        if body is not None:
            try:
                if isinstance(body, dict):
                    payload = body
                else:
                    payload = json.loads(body)
            except Exception as e:
                # Unparseable error body: report it raw and fail.
                self.error = dict(code=-1, message="Unable to parse output as JSON, see 'raw' output. %s" % e)
                self.result['raw'] = body
                self.fail_json(msg='MSO Error:', data=data, info=info)
            self.error = payload
            if 'code' in payload:
                self.fail_json(msg='MSO Error {code}: {message}'.format(**payload), data=data, info=info, payload=payload)
            else:
                self.fail_json(msg='MSO Error:'.format(**payload), data=data, info=info, payload=payload)
        else:
            # Connection error
            msg = 'Connection failed for {0}. {1}'.format(info.get('url'), info.get('msg'))
            self.error = msg
            self.fail_json(msg=msg)

    return {}
def create(
        module, url, check, creator, expire,
        expire_on_resolve, reason, subscription):
    """Create a Sensu silence entry unless an identical one already exists.

    :returns: tuple (failed, result, changed); result is the parsed POST
        response (or the current entries when nothing was created).
    """
    (rc, out, changed) = query(module, url, check, subscription)
    for i in out:
        if (i['subscription'] == subscription):
            # Treat '' / None as "not specified" and match any value.
            if (
                    (check is None or check == i['check']) and
                    (creator == '' or creator == i['creator']) and
                    (reason == '' or reason == i['reason']) and
                    (expire is None or expire == i['expire']) and
                    (expire_on_resolve is None or expire_on_resolve == i['expire_on_resolve'])
            ):
                # An identical silence entry already exists -- no change.
                return False, out, False

    # module.check_mode is inherited from the AnsibleModule class
    if not module.check_mode:
        headers = {
            'Content-Type': 'application/json',
        }
        url = url + '/silenced'
        request_data = {
            'check': check,
            'creator': creator,
            'expire': expire,
            'expire_on_resolve': expire_on_resolve,
            'reason': reason,
            'subscription': subscription,
        }

        # Remove keys with None value
        for k, v in dict(request_data).items():
            if v is None:
                del request_data[k]

        response, info = fetch_url(
            module, url, method='POST',
            headers=headers, data=json.dumps(request_data)
        )
        if info['status'] != 201:
            module.fail_json(
                msg="Failed to silence %s. Reason: %s" % (subscription, info['msg'])
            )
        try:
            json_out = json.loads(response.read())
        except (ValueError, AttributeError):
            # BUGFIX: narrowed from a bare `except:` -- only an unreadable
            # or non-JSON body should fall back to empty output.
            json_out = ""

        return False, json_out, True
    return False, out, True
def request_download(self, path, destination=None):
    """Download `path` (relative to the base URI) to `destination`.

    When `destination` is an existing directory, a preliminary GET probes
    for a redirect and the final filename is taken from the response's
    Content-Disposition header. Returns (redirect_info, destination).

    NOTE(review): when self.params['src'] is set, that file is sent as the
    request body while still using the GET method -- presumably intended
    by this API; confirm against callers.
    """
    self.url = urljoin(self.baseuri, path)
    redirected = False
    redir_info = {}
    redirect = {}

    src = self.params.get('src')
    if src:
        try:
            self.headers.update({'Content-Length': os.stat(src).st_size})
            data = open(src, 'rb')
        except OSError:
            self.fail_json(msg='Unable to open source file %s' % src, elapsed=0)
    else:
        pass
        data = None

    kwargs = {}
    if destination is not None:
        if os.path.isdir(destination):
            # first check if we are redirected to a file download
            check, redir_info = fetch_url(self.module,
                                          self.url,
                                          headers=self.headers,
                                          method='GET',
                                          timeout=self.params.get('timeout'))
            # if we are redirected, update the url with the location header,
            # and update dest with the new url filename
            if redir_info['status'] in (301, 302, 303, 307):
                self.url = redir_info.get('location')
                redirected = True
            # Filename comes from the probe's Content-Disposition header.
            destination = os.path.join(destination, check.headers.get("Content-Disposition").split("filename=")[1])
        # if destination file already exist, only download if file newer
        if os.path.exists(destination):
            kwargs['last_mod_time'] = datetime.datetime.utcfromtimestamp(os.path.getmtime(destination))

    resp, info = fetch_url(self.module,
                           self.url,
                           data=data,
                           headers=self.headers,
                           method='GET',
                           timeout=self.params.get('timeout'),
                           unix_socket=self.params.get('unix_socket'),
                           **kwargs)

    try:
        content = resp.read()
    except AttributeError:
        # there was no content, but the error read() may have been stored in the info as 'body'
        content = info.pop('body', '')

    if src:
        # Try to close the open file handle
        try:
            data.close()
        except Exception:
            pass

    redirect['redirected'] = redirected or info.get('url') != self.url
    redirect.update(redir_info)
    redirect.update(info)

    write_file(self.module, self.url, destination, content, redirect)

    return redirect, destination
def request_upload(self, path, fields=None):
    ''' Generic HTTP MultiPart POST method for MSO uploads.

    :param path: path joined to the base URI.
    :param fields: dict of multipart form fields for MultipartEncoder.
    :returns: decoded JSON body on success; fails the module on HTTP
        errors; {} otherwise.
    '''
    self.path = path
    self.url = urljoin(self.baseuri, path)

    if not HAS_MULTIPART_ENCODER:
        self.fail_json(msg='requests-toolbelt is required for the upload state of this module')

    mp_encoder = MultipartEncoder(fields=fields)
    # The encoder supplies the multipart boundary in its content type.
    self.headers['Content-Type'] = mp_encoder.content_type
    self.headers['Accept-Encoding'] = "gzip, deflate, br"

    resp, info = fetch_url(self.module,
                           self.url,
                           headers=self.headers,
                           data=mp_encoder,
                           method='POST',
                           timeout=self.params.get('timeout'),
                           use_proxy=self.params.get('use_proxy'))

    self.response = info.get('msg')
    self.status = info.get('status')

    # Get change status from HTTP headers
    if 'modified' in info:
        self.has_modified = True
        if info.get('modified') == 'false':
            self.result['changed'] = False
        elif info.get('modified') == 'true':
            self.result['changed'] = True

    # 200: OK, 201: Created, 202: Accepted, 204: No Content
    if self.status in (200, 201, 202, 204):
        output = resp.read()
        if output:
            return json.loads(output)

    # 400: Bad Request, 401: Unauthorized, 403: Forbidden,
    # 405: Method Not Allowed, 406: Not Acceptable
    # 500: Internal Server Error, 501: Not Implemented
    elif self.status >= 400:
        # Prefer the response body; fall back to the error body in info.
        try:
            payload = json.loads(resp.read())
        except (ValueError, AttributeError):
            try:
                payload = json.loads(info.get('body'))
            except Exception:
                self.fail_json(msg='MSO Error:', info=info)
        if 'code' in payload:
            self.fail_json(msg='MSO Error {code}: {message}'.format(**payload), info=info, payload=payload)
        else:
            self.fail_json(msg='MSO Error:'.format(**payload), info=info, payload=payload)

    return {}
def find_model(self, search_criteria, ret_attrs=None):
    """
    Search for a model in /models

    :param search_criteria: The XML <rs:search-criteria>
    :type search_criteria: str
    :param ret_attrs: List of attributes by name or ID to return back
        (default is Model_Handle). NOTE: matched entries are removed from
        this list in place as the response is processed.
    :type ret_attrs: list

    returns: Dictionary mapping of ret_attrs to values:
        {ret_attr: ret_val}
    rtype: dict
    """
    # If no return attributes were asked for, return Model_Handle.
    if ret_attrs is None:
        ret_attrs = ['Model_Handle']

    # Set the XML <rs:requested-attribute id=<id>> tags. If no hex ID
    # is found for the name, assume it is already in hex. {name: hex ID}
    # (A dead local that precomputed the same expression was removed.)
    rqstd_attrs = ""
    for ra in ret_attrs:
        rqstd_attrs += '<rs:requested-attribute id="%s" />' % (self.attr_id(ra) or ra)

    # Build the complete XML search query for HTTP POST.
    xml = """<?xml version="1.0" encoding="UTF-8"?>
<rs:model-request throttlesize="5"
xmlns:rs="http://www.ca.com/spectrum/restful/schema/request"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.ca.com/spectrum/restful/schema/request ../../../xsd/Request.xsd">
    <rs:target-models>
        <rs:models-search>
            <rs:search-criteria xmlns="http://www.ca.com/spectrum/restful/schema/filter">
                {0}
            </rs:search-criteria>
        </rs:models-search>
    </rs:target-models>
    {1}
</rs:model-request>
""".format(search_criteria, rqstd_attrs)

    # POST to /models and fail on errors.
    url = self.build_url("/models")
    resp, info = fetch_url(self.module, url, data=xml, method="POST",
                           use_proxy=self.module.params['use_proxy'],
                           headers={"Content-Type": "application/xml",
                                    "Accept": "application/xml"})
    status_code = info["status"]
    if status_code >= 400:
        body = info['body']
    else:
        body = "" if resp is None else resp.read()
    if status_code != 200:
        self.result['msg'] = "HTTP POST error %s: %s: %s" % (status_code, url, body)
        self.module.fail_json(**self.result)

    # Parse through the XML response and fail on any detected errors.
    root = ET.fromstring(body)
    total_models = int(root.attrib['total-models'])
    error = root.attrib['error']
    model_responses = root.find('ca:model-responses', self.resp_namespace)

    # Exactly one matching model is expected.
    if total_models < 1:
        self.result['msg'] = "No models found matching search criteria `%s'" % search_criteria
        self.module.fail_json(**self.result)
    elif total_models > 1:
        self.result['msg'] = "More than one model found (%s): `%s'" % (total_models, ET.tostring(model_responses, encoding='unicode'))
        self.module.fail_json(**self.result)

    if error != "EndOfResults":
        self.result['msg'] = "Unexpected search response `%s': %s" % (error, ET.tostring(model_responses, encoding='unicode'))
        self.module.fail_json(**self.result)

    model = model_responses.find('ca:model', self.resp_namespace)
    attrs = model.findall('ca:attribute', self.resp_namespace)
    if not attrs:
        self.result['msg'] = "No attributes returned."
        self.module.fail_json(**self.result)

    # XML response should be successful. Iterate and set each returned
    # attribute ID/name and value for return.
    ret = dict()
    for attr in attrs:
        attr_id = attr.get('id')
        attr_name = self.attr_name(attr_id)
        # Note: all values except empty strings (None) are strings only!
        attr_val = attr.text
        key = attr_name if attr_name in ret_attrs else attr_id
        ret[key] = attr_val
        ret_attrs.remove(key)

    return ret
def main():
    """Entry point: validate src/dest, optionally download a remote
    archive, pick an unarchive handler, extract if needed, and apply
    file attributes; exits via module.exit_json/fail_json."""
    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path', required=True),
            # used to handle 'dest is a directory' via template, a slight hack
            original_basename=dict(type='str'),
            dest=dict(type='path', required=True),
            remote_src=dict(type='bool', default=False),
            creates=dict(type='path'),
            list_files=dict(type='bool', default=False),
            keep_newer=dict(type='bool', default=False),
            exclude=dict(type='list', default=[]),
            extra_opts=dict(type='list', default=[]),
            validate_certs=dict(type='bool', default=True),
        ),
        add_file_common_args=True,
        # check-mode only works for zip files, we cover that later
        supports_check_mode=True,
    )

    src = module.params['src']
    dest = module.params['dest']
    remote_src = module.params['remote_src']
    file_args = module.load_file_common_arguments(module.params)

    # did tar file arrive?
    if not os.path.exists(src):
        if not remote_src:
            module.fail_json(msg="Source '%s' failed to transfer" % src)
        # If remote_src=true, and src= contains ://, try and download the
        # file to a temp directory.
        elif '://' in src:
            tempdir = os.path.dirname(os.path.realpath(__file__))
            package = os.path.join(tempdir, str(src.rsplit('/', 1)[1]))
            try:
                rsp, info = fetch_url(module, src)
                # If download fails, raise a proper exception
                if rsp is None:
                    raise Exception(info['msg'])

                # open in binary mode for python3; 'with' guarantees the
                # handle is closed even when a read/write raises mid-stream
                # (the original leaked the handle on that path).
                with open(package, 'wb') as f:
                    # Read BUFSIZE bytes at a time to save on ram
                    while True:
                        data = rsp.read(BUFSIZE)
                        data = to_bytes(data, errors='surrogate_or_strict')
                        if len(data) < 1:
                            break  # End of file, break while loop
                        f.write(data)
                src = package
            except Exception as e:
                module.fail_json(msg="Failure downloading %s, %s" % (src, to_native(e)))
        else:
            module.fail_json(msg="Source '%s' does not exist" % src)
    if not os.access(src, os.R_OK):
        module.fail_json(msg="Source '%s' not readable" % src)

    # skip working with 0 size archives
    try:
        if os.path.getsize(src) == 0:
            module.fail_json(msg="Invalid archive '%s', the file is 0 bytes" % src)
    except Exception as e:
        module.fail_json(msg="Source '%s' not readable, %s" % (src, to_native(e)))

    # is dest OK to receive tar file?
    if not os.path.isdir(dest):
        module.fail_json(msg="Destination '%s' is not a directory" % dest)

    handler = pick_handler(src, dest, file_args, module)

    res_args = dict(handler=handler.__class__.__name__, dest=dest, src=src)

    # do we need to do unpack?
    check_results = handler.is_unarchived()

    # DEBUG
    # res_args['check_results'] = check_results

    if module.check_mode:
        res_args['changed'] = not check_results['unarchived']
    elif check_results['unarchived']:
        res_args['changed'] = False
    else:
        # do the unpack
        try:
            res_args['extract_results'] = handler.unarchive()
            if res_args['extract_results']['rc'] != 0:
                module.fail_json(msg="failed to unpack %s to %s" % (src, dest), **res_args)
        except IOError:
            module.fail_json(msg="failed to unpack %s to %s" % (src, dest), **res_args)
        else:
            res_args['changed'] = True

    # Get diff if required
    if check_results.get('diff', False):
        res_args['diff'] = {'prepared': check_results['diff']}

    # Run only if we found differences (idempotence) or diff was missing
    if res_args.get('diff', True) and not module.check_mode:
        # do we need to change perms?
        for filename in handler.files_in_archive:
            file_args['path'] = os.path.join(dest, filename)
            try:
                res_args['changed'] = module.set_fs_attributes_if_different(
                    file_args, res_args['changed'], expand=False)
            except (IOError, OSError) as e:
                module.fail_json(
                    msg="Unexpected error when accessing exploded file: %s" % to_native(e),
                    **res_args)

    if module.params['list_files']:
        res_args['files'] = handler.files_in_archive

    module.exit_json(**res_args)