def test_send_request_should_return_error_info_when_http_error_raises(self):
    """An HTTP 500 from the transport becomes a (status, parsed-body) tuple."""
    error_body = StringIO('{"errorMessage": "ERROR"}')
    self.connection_mock.send.side_effect = HTTPError(
        'http://testhost.com', 500, '', {}, error_body
    )
    response = self.checkpoint_plugin.send_request('/test', None)
    assert (500, {'errorMessage': 'ERROR'}) == response
def test_modules_provisioned_raises(self):
    """modules_provisioned must wrap an HTTP 400 into F5ModuleError."""
    self.connection.send.side_effect = HTTPError(
        'https://bigip.local/mgmt/tm/sys/provision', 400, '', {},
        StringIO('{"errorMessage": "ERROR"}'),
    )
    with self.assertRaises(F5ModuleError) as ctx:
        modules_provisioned(self.client)
    assert "{'errorMessage': 'ERROR'}" in str(ctx.exception)
def make_cookies(*args, **kwargs):
    """Build a mocked HTTP response carrying two Set-Cookie headers.

    The caller passes a cookie jar via ``kwargs['cookies']``; two fixed
    cookies (Foo=bar, Baz=qux) are stored in the jar and mirrored as
    ``Set-Cookie`` headers on the mocked response.  Returns the mock.
    """
    jar = kwargs['cookies']
    response = MagicMock()
    try:
        response.headers = HTTPMessage()
        add_header = response.headers.add_header
    except TypeError:
        # PY2: HTTPMessage needs a file object and exposes addheader()
        response.headers = HTTPMessage(StringIO())
        add_header = response.headers.addheader
    response.info.return_value = response.headers
    for name, value in (('Foo', 'bar'), ('Baz', 'qux')):
        jar.set_cookie(
            Cookie(
                version=0,
                name=name,
                value=value,
                port=None,
                port_specified=False,
                domain="ansible.com",
                domain_specified=True,
                domain_initial_dot=False,
                path="/",
                path_specified=True,
                secure=False,
                expires=None,
                discard=False,
                comment=None,
                comment_url=None,
                rest=None,
            )
        )
        add_header('Set-Cookie', '%s=%s' % (name, value))
    return response
def remove_namespaces(rpc_reply):
    """Strip XML namespaces from an RPC reply using the XSLT stylesheet
    produced by transform_reply(), returning the resulting element tree root."""
    stylesheet_doc = etree.parse(
        BytesIO(transform_reply()),
        etree.XMLParser(remove_blank_text=True),
    )
    apply_transform = etree.XSLT(stylesheet_doc)
    stripped = apply_transform(etree.parse(StringIO(str(rpc_reply))))
    return etree.fromstring(str(stripped))
def test_bigiq_version_raises_on_http_error(self):
    """bigiq_version should convert an HTTP 400 into F5ModuleError."""
    fake_error = HTTPError(
        'https://bigiq.local/mgmt/shared/resolver/device-groups/cm-shared-all-big-iqs/devices',
        400, '', {}, StringIO('{"errorMessage": "ERROR"}'),
    )
    self.connection.send.side_effect = fake_error
    with self.assertRaises(F5ModuleError) as ctx:
        bigiq_version(self.client)
    assert "{'errorMessage': 'ERROR'}" in str(ctx.exception)
def test_login_raises_exception_when_http_error(self):
    """login should raise ConnectionError carrying the server's message."""
    self.connection_mock.send.side_effect = HTTPError(
        'http://testhost.com', 400, '', {},
        StringIO('{"message": "Failed to authenticate user"}'),
    )
    with self.assertRaises(ConnectionError) as ctx:
        self.ftd_plugin.login('foo', 'bar')
    assert 'Failed to authenticate user' in str(ctx.exception)
def test_send_request_should_return_error_info_when_http_error_raises(self):
    """A 500 from the transport maps to a structured failure response."""
    self.connection_mock.send.side_effect = HTTPError(
        'http://testhost.com', 500, '', {},
        StringIO('{"errorMessage": "ERROR"}'),
    )
    response = self.ftd_plugin.send_request('/test', HTTPMethod.GET)
    expected = {
        ResponseParams.SUCCESS: False,
        ResponseParams.STATUS_CODE: 500,
        ResponseParams.RESPONSE: {'errorMessage': 'ERROR'},
    }
    assert expected == response
def test_send_request_should_return_error_info_when_http_error_raises(self):
    """send_request should report the status code and decoded error body."""
    self.connection_mock.send.side_effect = HTTPError(
        'http://f5cs.com', 500, '', {},
        StringIO('{"errorMessage": "ERROR"}'),
    )
    result = self.f5cs_plugin.send_request('/login', None)
    assert result == {'code': 500, 'contents': {'errorMessage': 'ERROR'}}
def test_send_request_should_return_error_info_when_http_error_raises(self):
    """A failed login surfaces the server error via AnsibleConnectionFailure."""
    self.connection.send.side_effect = HTTPError(
        'http://bigip.local', 400, '', {},
        StringIO('{"errorMessage": "ERROR"}'),
    )
    with self.assertRaises(AnsibleConnectionFailure) as ctx:
        self.connection.httpapi.login('foo', 'bar')
    expected = "Authentication process failed, server returned: {'errorMessage': 'ERROR'}"
    assert expected in str(ctx.exception)
def test_download_file_http_error(self):
    """download_file should propagate the underlying HTTPError unchanged."""
    self.connection.send.side_effect = [
        HTTPError('http://bigip.local', 400, '', {},
                  StringIO('{"errorMessage": "ERROR"}')),
    ]
    with self.assertRaises(HTTPError) as ctx:
        self.connection.download_file('/fake/path/to/download/fakefile',
                                      '/tmp/fakefile')
    assert 400 == ctx.exception.code
def cfgparser():
    """Return a ConfigParser pre-loaded with in-memory fixture data.

    Returns:
        configparser.ConfigParser: parser populated with a ``defaults``
        and a ``level1`` section from the inline INI snippet.
    """
    CFGDATA = StringIO("""
[defaults]
defaults_one = 'data_defaults_one'

[level1]
level1_one = 'data_level1_one'
""")
    p = configparser.ConfigParser()
    # readfp() was deprecated since Python 3.2 and removed in 3.12;
    # read_file() is the drop-in replacement.
    p.read_file(CFGDATA)
    return p
def test_upload_file_total_failure(self):
    """upload_file gives up after three failed attempts."""
    self.connection.send.side_effect = HTTPError(
        'http://bigip.local', 400, '', {},
        StringIO('{"errorMessage": "ERROR"}'),
    )
    binary_file = os.path.join(fixture_path, 'test_binary_file.mock')
    with self.assertRaises(AnsibleConnectionFailure) as ctx:
        self.connection.httpapi.upload_file('/fake/path/to/upload', binary_file)
    assert 'Failed to upload file too many times.' in str(ctx.exception)
    assert self.connection.send.call_count == 3
def test_fetch_url_httperror(open_url_mock, fake_ansible_module):
    """fetch_url should translate an HTTPError into an info dict."""
    open_url_mock.side_effect = urllib_error.HTTPError(
        'http://ansible.com/', 500, 'Internal Server Error', {},
        StringIO('TESTS'),
    )
    r, info = fetch_url(fake_ansible_module, 'http://ansible.com/')
    expected = {
        'msg': 'HTTP Error 500: Internal Server Error',
        'body': 'TESTS',
        'status': 500,
        'url': 'http://ansible.com/',
    }
    assert info == expected
def test_send_request_should_return_error_info_when_http_error_raises(self):
    """An HTTP 500 maps to a (status, parsed-body) tuple."""
    body = StringIO('{"errorMessage": "ERROR"}')
    self.connection_mock.send.side_effect = HTTPError(
        "http://testhost.com", 500, "", {}, body)
    result = self.checkpoint_plugin.send_request("/test", None)
    assert (500, {"errorMessage": "ERROR"}) == result
def test_upload_file_retry(self):
    """A single failure is retried and the second attempt succeeds."""
    self.connection.send.side_effect = [
        HTTPError('http://bigip.local', 400, '', {},
                  StringIO('{"errorMessage": "ERROR"}')),
        True,
    ]
    binary_file = os.path.join(fixture_path, 'test_binary_file.mock')
    self.connection.httpapi.upload_file('/fake/path/to/upload', binary_file)
    expected_headers = {
        'Content-Range': '0-307199/307200',
        'Content-Type': 'application/octet-stream',
        'Connection': 'keep-alive',
    }
    self.connection.send.assert_called_with(
        '/fake/path/to/upload/test_binary_file.mock',
        ANY,
        method='POST',
        headers=expected_headers,
    )
    assert self.connection.send.call_count == 2
def run(self, tmp=None, task_vars=None):
    ''' handler for textfsm action

    Parses `content` with a TextFSM template taken either from the
    `file` argument (a path) or the `src` argument (inline template
    text); the two are mutually exclusive.  On success the parsed
    records are exposed as ansible_facts under `name` (if given).

    :raises AnsibleError: when TextFSM is unavailable, a required
        argument is missing, both `src` and `file` are supplied, or
        template parsing fails.
    '''
    if task_vars is None:
        task_vars = dict()

    result = super(ActionModule, self).run(tmp, task_vars)
    del tmp  # tmp no longer has any effect

    try:
        if not HAS_TEXTFSM:
            raise AnsibleError('textfsm_parser engine requires the TextFSM library to be installed')

        try:
            filename = self._task.args.get('file')
            src = self._task.args.get('src')
            content = self._task.args['content']
            name = self._task.args.get('name')
        except KeyError as exc:
            raise AnsibleError('missing required argument: %s' % exc)

        if src and filename:
            raise AnsibleError('`src` and `file` are mutually exclusive arguments')

        if not isinstance(content, string_types):
            return {'failed': True, 'msg': '`content` must be of type str, got %s' % type(content)}

        if filename:
            # Read the template into an in-memory buffer so the file
            # handle is closed promptly (the previous code leaked it).
            with open(filename) as template_file:
                tmpl = StringIO(template_file.read())
        else:
            tmpl = StringIO()
            tmpl.write(src.strip())
            tmpl.seek(0)

        try:
            re_table = textfsm.TextFSM(tmpl)
            fsm_results = re_table.ParseText(content)
        except Exception as exc:
            raise AnsibleError(str(exc))

        # Zip each parsed row against the template header to build
        # one dict per record.
        final_facts = []
        for item in fsm_results:
            facts = {}
            facts.update(dict(zip(re_table.header, item)))
            final_facts.append(facts)

        if name:
            result['ansible_facts'] = {name: final_facts}
        else:
            result['ansible_facts'] = {}
    finally:
        # Always clean up the remote temporary path.
        self._remove_tmp_path(self._connection._shell.tmpdir)

    return result
def run(self, terms, variables, **kwargs):
    """Translate XML data (string or file path) to JSON per the YANG model.

    terms[0] is either raw XML or a path to an XML file; kwargs must
    supply 'yang_file' (path or glob) and may supply 'search_path' and
    'keep_tmp_files'.  Returns a one-element list holding the decoded
    JSON document.  Raises AnsibleError on any validation/translation
    failure.

    NOTE(review): this temporarily replaces sys.argv/stdout/stderr to
    drive pyang in-process; the exact save/restore ordering matters.
    """
    res = []
    try:
        xml_data = terms[0]
    except IndexError:
        raise AnsibleError(
            "Either xml string or path to xml file must be specified")

    try:
        yang_file = kwargs['yang_file']
    except KeyError:
        raise AnsibleError("value of 'yang_file' must be specified")

    yang_file = os.path.realpath(os.path.expanduser(yang_file))
    if not os.path.isfile(yang_file):
        # Maybe we are passing a glob?
        yang_files = glob.glob(yang_file)
        if not yang_files:
            # Glob returned no files
            raise AnsibleError('%s invalid file path' % yang_file)
    else:
        yang_files = [yang_file]

    search_path = kwargs.pop('search_path', '')
    keep_tmp_files = kwargs.pop('keep_tmp_files', False)

    # Expand and validate each colon-separated search directory.
    abs_search_path = None
    for path in search_path.split(':'):
        path = os.path.realpath(os.path.expanduser(path))
        if abs_search_path is None:
            abs_search_path = path
        else:
            abs_search_path += ':' + path
        if path != '' and not os.path.isdir(path):
            raise AnsibleError('%s is invalid directory path' % path)
    search_path = abs_search_path

    plugindir = unfrackpath(XM2JSONL_DIR_PATH)
    makedirs_safe(plugindir)

    if os.path.isfile(xml_data):
        # input is xml file path
        xml_file_path = xml_data
    else:
        # input is xml string, copy it to file in temporary location
        xml_file_path = os.path.join(XM2JSONL_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'xml'))
        xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path))
        with open(xml_file_path, 'w') as f:
            if not xml_data.startswith('<?xml version'):
                xml_data = '<?xml version="1.0" encoding="UTF-8"?>\n' + xml_data
            data = xml_data
            f.write(data)

    xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path))

    try:
        # validate xml
        etree.parse(xml_file_path)
        display.vvvv("Parsing xml data from temporary file: %s" % xml_file_path)
    except Exception as exc:
        if not keep_tmp_files:
            shutil.rmtree(os.path.realpath(
                os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True)
        raise AnsibleError("Failed to load xml data: %s" %
                           (to_text(exc, errors='surrogate_or_strict')))

    # Run pyang in-process: load it as a script module and fake its CLI
    # environment (argv plus captured stdout/stderr).
    base_pyang_path = sys.modules['pyang'].__file__
    pyang_exec_path = find_file_in_path('pyang')
    pyang_exec = imp.load_source('pyang', pyang_exec_path)
    saved_arg = deepcopy(sys.argv)
    sys.modules['pyang'].__file__ = base_pyang_path
    saved_stdout = sys.stdout
    saved_stderr = sys.stderr
    sys.stdout = sys.stderr = StringIO()

    xsl_file_path = os.path.join(XM2JSONL_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'xsl'))
    json_file_path = os.path.join(XM2JSONL_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'json'))
    xls_file_path = os.path.realpath(os.path.expanduser(xsl_file_path))
    json_file_path = os.path.realpath(os.path.expanduser(json_file_path))

    # fill in the sys args before invoking pyang
    sys.argv = [
        pyang_exec_path, '-f', 'jsonxsl', '-o', xls_file_path, '-p',
        search_path, "--lax-quote-checks"
    ] + yang_files
    display.display("Generating xsl file '%s' by executing command '%s'"
                    % (xls_file_path, ' '.join(sys.argv)), log_only=True)
    try:
        pyang_exec.run()
    except SystemExit:
        # pyang exits via SystemExit on success; swallow it.
        pass
    except Exception as e:
        if not keep_tmp_files:
            shutil.rmtree(os.path.realpath(
                os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True)
        raise AnsibleError(
            'Error while generating intermediate (xsl) file: %s' % e)
    finally:
        # pyang reports failures on stderr rather than raising.
        err = sys.stderr.getvalue()
        if err and 'error' in err.lower():
            if not keep_tmp_files:
                shutil.rmtree(os.path.realpath(
                    os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True)
            raise AnsibleError(
                'Error while generating (xsl) intermediate file: %s' % err)

    xsltproc_exec_path = find_file_in_path('xsltproc')

    # fill in the sys args before invoking xsltproc
    sys.argv = [
        xsltproc_exec_path, '-o', json_file_path, xsl_file_path, xml_file_path
    ]
    display.display(
        "Generating json data in temp file '%s' by executing command '%s'"
        % (json_file_path, ' '.join(sys.argv)), log_only=True)
    time.sleep(5)  # NOTE(review): fixed delay before running xsltproc — purpose not evident here
    try:
        os.system(' '.join(sys.argv))
    except SystemExit:
        pass
    finally:
        err = sys.stderr.getvalue()
        if err and 'error' in err.lower():
            if not keep_tmp_files:
                shutil.rmtree(os.path.realpath(
                    os.path.expanduser(XM2JSONL_DIR_PATH)),
                    ignore_errors=True)
            raise AnsibleError('Error while translating to json: %s' % err)

    # Restore the process-global state we hijacked above.
    sys.argv = saved_arg
    sys.stdout = saved_stdout
    sys.stderr = saved_stderr

    try:
        display.vvvv("Reading output json data from temporary file: %s" % json_file_path)
        with open(json_file_path) as fp:
            content = json.load(fp)
    except Exception as e:
        raise AnsibleError('Error while reading json document: %s' % e)
    finally:
        if not keep_tmp_files:
            shutil.rmtree(os.path.realpath(
                os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True)
    res.append(content)
    return res
def json_to_xml(self, json_data, tmp_dir_path):
    """
    The method translates JSON data encoded as per YANG model (RFC 7951)
    to XML payload
    :param json_data: JSON data that should to translated to XML
        (a dict, or a path to an existing JSON file)
    :param tmp_dir_path: Temporary directory path to copy intermediate files
    :return: XML data in string format.
    :raises ValueError: on invalid input, or any pyang/json2xml failure.
    """
    # Hijack argv/stdout/stderr so pyang and json2xml can be driven
    # in-process as if from the command line; restored near the end.
    saved_arg = deepcopy(sys.argv)
    saved_stdout = sys.stdout
    saved_stderr = sys.stderr
    sys.stdout = sys.stderr = StringIO()

    if isinstance(json_data, dict):
        # input is in json format, copy it to file in temporary location
        json_file_path = os.path.join(
            tmp_dir_path, "%s.%s" % (str(uuid.uuid4()), "json"))
        json_file_path = os.path.realpath(
            os.path.expanduser(json_file_path))
        with open(json_file_path, "w") as f:
            f.write(json.dumps(json_data))
        json_file_path = os.path.realpath(
            os.path.expanduser(json_file_path))
    elif os.path.isfile(json_data):
        json_file_path = json_data
    else:
        raise ValueError("unable to create/find temporary json file %s" %
                         json_data)

    try:
        # validate json
        with open(json_file_path) as fp:
            json.load(fp)
    except Exception as exc:
        raise ValueError("Failed to load json configuration: %s" %
                         (to_text(exc, errors="surrogate_or_strict")))

    jtox_file_path = os.path.join(tmp_dir_path,
                                  "%s.%s" % (str(uuid.uuid4()), "jtox"))
    xml_file_path = os.path.join(tmp_dir_path,
                                 "%s.%s" % (str(uuid.uuid4()), "xml"))
    jtox_file_path = os.path.realpath(os.path.expanduser(jtox_file_path))
    xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path))

    # Bundled nc-op.yang metadata is appended to the model search path.
    yang_metada_dir = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "files/yang")
    yang_metadata_path = os.path.join(yang_metada_dir, "nc-op.yang")
    self._search_path += ":%s" % yang_metada_dir

    # fill in the sys args before invoking pyang
    sys.argv = ([
        self._pyang_exec_path, "-f", "jtox", "-o", jtox_file_path, "-p",
        self._search_path, "--lax-quote-checks",
    ] + self._yang_files + [yang_metadata_path])
    if self._debug:
        self._debug("Generating jtox file '%s' by executing command '%s'"
                    % (jtox_file_path, " ".join(sys.argv)))
    try:
        self._pyang_module.run()
    except SystemExit:
        # pyang terminates with SystemExit even on success
        pass
    except Exception as e:
        shutil.rmtree(
            os.path.realpath(os.path.expanduser(tmp_dir_path)),
            ignore_errors=True,
        )
        raise ValueError(
            "Error while generating intermediate (jtox) file: %s" % e)
    finally:
        # pyang reports errors on the captured stderr
        err = sys.stderr.getvalue()
        if err and "error" in err.lower():
            if not self._keep_tmp_files:
                shutil.rmtree(
                    os.path.realpath(os.path.expanduser(tmp_dir_path)),
                    ignore_errors=True,
                )
            raise ValueError(
                "Error while generating intermediate (jtox) file: %s" % err)

    json2xml_exec_path = find_file_in_path("json2xml")
    json2xml_module = load_from_source(json2xml_exec_path, "json2xml")

    # fill in the sys args before invoking json2xml
    sys.argv = [
        json2xml_exec_path,
        "-t",
        self._doctype,
        "-o",
        xml_file_path,
        jtox_file_path,
        json_file_path,
    ]
    if self._debug:
        self._debug("Generating xml file '%s' by executing command '%s'"
                    % (xml_file_path, " ".join(sys.argv)))
    try:
        json2xml_module.main()
        with open(xml_file_path, "r+") as fp:
            b_content = fp.read()
        content = to_text(b_content, errors="surrogate_or_strict")
    except UnicodeError as uni_error:
        raise ValueError("Error while translating to text: %s" %
                         str(uni_error))
    except SystemExit:
        pass
    finally:
        err = sys.stderr.getvalue()
        if err and "error" in err.lower():
            if not self._keep_tmp_files:
                shutil.rmtree(
                    os.path.realpath(os.path.expanduser(tmp_dir_path)),
                    ignore_errors=True,
                )
            raise ValueError("Error while translating to xml: %s" % err)

    # Restore the hijacked process-global state.
    sys.argv = saved_arg
    sys.stdout = saved_stdout
    sys.stderr = saved_stderr

    try:
        # Strip any XML declaration before re-parsing the result.
        content = re.sub(r"<\? ?xml .*\? ?>", "", content)
        root = etree.fromstring(content)
    except Exception as e:
        raise ValueError("Error while reading xml document: %s" % e)
    finally:
        if not self._keep_tmp_files:
            shutil.rmtree(
                os.path.realpath(os.path.expanduser(tmp_dir_path)),
                ignore_errors=True,
            )
    return etree.tostring(root).decode("utf-8")
def _create_wrapper():
    # Return a fresh in-memory text stream over `text_as_string`,
    # which is captured from the enclosing scope (defined elsewhere).
    return StringIO(text_as_string)
def json_to_xml(self, json_data):
    """
    The method translates JSON data encoded as per YANG model (RFC 7951)
    to XML payload
    :param json_data: JSON data that should to translated to XML
        (passed straight through as an argument to json2xml)
    :return: XML data in string format.
    :raises AnsibleError: on any pyang/json2xml failure.
    """
    # Hijack argv/stdout/stderr so pyang and json2xml can be driven
    # in-process as if from the command line; restored near the end.
    saved_arg = deepcopy(sys.argv)
    saved_stdout = sys.stdout
    saved_stderr = sys.stderr
    sys.stdout = sys.stderr = StringIO()

    # Per-invocation subdirectory keeps parallel runs isolated.
    plugin_instance = str(uuid.uuid4())

    plugindir = unfrackpath(JSON2XML_DIR_PATH)
    makedirs_safe(plugindir)
    makedirs_safe(os.path.join(plugindir, plugin_instance))

    jtox_file_path = os.path.join(
        JSON2XML_DIR_PATH,
        plugin_instance,
        "%s.%s" % (str(uuid.uuid4()), "jtox"),
    )
    xml_file_path = os.path.join(
        JSON2XML_DIR_PATH,
        plugin_instance,
        "%s.%s" % (str(uuid.uuid4()), "xml"),
    )
    jtox_file_path = os.path.realpath(os.path.expanduser(jtox_file_path))
    xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path))

    # Bundled nc-op.yang metadata is appended to the model search path.
    yang_metada_dir = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "files/yang")
    yang_metadata_path = os.path.join(yang_metada_dir, "nc-op.yang")
    self._search_path += ":%s" % yang_metada_dir

    # fill in the sys args before invoking pyang
    sys.argv = ([
        self._pyang_exec_path, "-f", "jtox", "-o", jtox_file_path, "-p",
        self._search_path, "--lax-quote-checks",
    ] + self._yang_files + [yang_metadata_path])
    try:
        self._pyang_exec.run()
    except SystemExit:
        # pyang terminates with SystemExit even on success
        pass
    except Exception as e:
        temp_dir = os.path.join(JSON2XML_DIR_PATH, plugin_instance)
        shutil.rmtree(
            os.path.realpath(os.path.expanduser(temp_dir)),
            ignore_errors=True,
        )
        raise AnsibleError(
            "Error while generating intermediate (jtox) file: %s" % e)
    finally:
        # pyang reports errors on the captured stderr
        err = sys.stderr.getvalue()
        if err and "error" in err.lower():
            if not self._keep_tmp_files:
                temp_dir = os.path.join(JSON2XML_DIR_PATH, plugin_instance)
                shutil.rmtree(
                    os.path.realpath(os.path.expanduser(temp_dir)),
                    ignore_errors=True,
                )
            raise AnsibleError(
                "Error while generating intermediate (jtox) file: %s" % err)

    json2xml_exec_path = find_file_in_path("json2xml")
    json2xml_exec = imp.load_source("json2xml", json2xml_exec_path)

    # fill in the sys args before invoking json2xml
    sys.argv = [
        json2xml_exec_path,
        "-t",
        self._doctype,
        "-o",
        xml_file_path,
        jtox_file_path,
        json_data,
    ]
    try:
        json2xml_exec.main()
        with open(xml_file_path, "r+") as fp:
            content = fp.read()
    except SystemExit:
        pass
    finally:
        err = sys.stderr.getvalue()
        if err and "error" in err.lower():
            if not self._keep_tmp_files:
                temp_dir = os.path.join(JSON2XML_DIR_PATH, plugin_instance)
                shutil.rmtree(
                    os.path.realpath(os.path.expanduser(temp_dir)),
                    ignore_errors=True,
                )
            raise AnsibleError("Error while translating to xml: %s" % err)

    # Restore the hijacked process-global state.
    sys.argv = saved_arg
    sys.stdout = saved_stdout
    sys.stderr = saved_stderr

    try:
        # Strip any XML declaration before re-parsing the result.
        content = re.sub(r"<\? ?xml .*\? ?>", "", content)
        root = etree.fromstring(content)
    except Exception as e:
        raise AnsibleError("Error while reading xml document: %s" % e)
    finally:
        if not self._keep_tmp_files:
            temp_dir = os.path.join(JSON2XML_DIR_PATH, plugin_instance)
            shutil.rmtree(
                os.path.realpath(os.path.expanduser(temp_dir)),
                ignore_errors=True,
            )
    return etree.tostring(root)
def run(self, terms, variables, **kwargs):
    """Translate a JSON configuration file to XML per the YANG model.

    terms[0] is a path to a JSON file; kwargs must supply 'yang_file'
    (path or glob) and may supply 'search_path', 'keep_tmp_files' and
    'root'.  Returns a one-element list holding the serialized XML.
    Raises AnsibleError on any validation/translation failure.
    """
    res = []
    try:
        json_config = terms[0]
    except IndexError:
        raise AnsibleError("path to json file must be specified")

    try:
        yang_file = kwargs['yang_file']
    except KeyError:
        raise AnsibleError("value of 'yang_file' must be specified")

    yang_file = os.path.realpath(os.path.expanduser(yang_file))
    if not os.path.isfile(yang_file):
        # Maybe we are passing a glob?
        yang_files = glob.glob(yang_file)
        if not yang_files:
            # Glob returned no files
            raise AnsibleError('%s invalid file path' % yang_file)
    else:
        yang_files = [yang_file]

    search_path = kwargs.pop('search_path', '')
    keep_tmp_files = kwargs.pop('keep_tmp_files', False)

    # Expand and validate each colon-separated search directory.
    abs_search_path = None
    for path in search_path.split(':'):
        path = os.path.realpath(os.path.expanduser(path))
        if abs_search_path is None:
            abs_search_path = path
        else:
            abs_search_path += ':' + path
        # FIX: was `path is not ''` — identity comparison with a string
        # literal is unreliable and raises SyntaxWarning; use equality.
        if path != '' and not os.path.isdir(path):
            raise AnsibleError('%s is invalid directory path' % path)
    search_path = abs_search_path

    json_config = os.path.realpath(os.path.expanduser(json_config))
    try:
        # validate json
        with open(json_config) as fp:
            json.load(fp)
    except Exception as exc:
        raise AnsibleError("Failed to load json configuration: %s" %
                           (to_text(exc, errors='surrogate_or_strict')))

    root_node = kwargs.get('root', 'config')

    # Run pyang in-process: load it as a script module and fake its CLI
    # environment (argv plus captured stdout/stderr); restored below.
    base_pyang_path = sys.modules['pyang'].__file__
    pyang_exec_path = find_file_in_path('pyang')
    pyang_exec = imp.load_source('pyang', pyang_exec_path)
    saved_arg = deepcopy(sys.argv)
    sys.modules['pyang'].__file__ = base_pyang_path
    saved_stdout = sys.stdout
    saved_stderr = sys.stderr
    sys.stdout = sys.stderr = StringIO()

    plugindir = unfrackpath(JSON2XML_DIR_PATH)
    makedirs_safe(plugindir)

    jtox_file_path = os.path.join(JSON2XML_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'jtox'))
    xml_file_path = os.path.join(JSON2XML_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'xml'))
    jtox_file_path = os.path.realpath(os.path.expanduser(jtox_file_path))
    xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path))

    # fill in the sys args before invoking pyang
    sys.argv = [
        pyang_exec_path, '-f', 'jtox', '-o', jtox_file_path, '-p',
        search_path, "--lax-quote-checks"
    ] + yang_files

    try:
        pyang_exec.run()
    except SystemExit:
        # pyang exits via SystemExit even on success; swallow it.
        pass
    except Exception as e:
        shutil.rmtree(os.path.realpath(
            os.path.expanduser(JSON2XML_DIR_PATH)), ignore_errors=True)
        raise AnsibleError(
            'Error while generating intermediate (jtox) file: %s' % e)
    finally:
        # pyang reports errors on the captured stderr rather than raising.
        err = sys.stderr.getvalue()
        if err and 'error' in err.lower():
            if not keep_tmp_files:
                shutil.rmtree(os.path.realpath(
                    os.path.expanduser(JSON2XML_DIR_PATH)), ignore_errors=True)
            raise AnsibleError(
                'Error while generating intermediate (jtox) file: %s' % err)

    json2xml_exec_path = find_file_in_path('json2xml')
    json2xml_exec = imp.load_source('json2xml', json2xml_exec_path)

    # fill in the sys args before invoking json2xml
    sys.argv = [
        json2xml_exec_path, '-t', root_node, '-o', xml_file_path,
        jtox_file_path, json_config
    ]

    try:
        json2xml_exec.main()
        with open(xml_file_path, 'r+') as fp:
            content = fp.read()
    except SystemExit:
        pass
    finally:
        err = sys.stderr.getvalue()
        if err and 'error' in err.lower():
            if not keep_tmp_files:
                shutil.rmtree(os.path.realpath(
                    os.path.expanduser(JSON2XML_DIR_PATH)), ignore_errors=True)
            raise AnsibleError('Error while translating to xml: %s' % err)

    # Restore the process-global state hijacked above.
    sys.argv = saved_arg
    sys.stdout = saved_stdout
    sys.stderr = saved_stderr

    try:
        # Strip any XML declaration before re-parsing the result.
        content = re.sub(r'<\? ?xml .*\? ?>', '', content)
        root = etree.fromstring(content)
    except Exception as e:
        raise AnsibleError('Error while reading xml document: %s' % e)
    finally:
        if not keep_tmp_files:
            shutil.rmtree(os.path.realpath(
                os.path.expanduser(JSON2XML_DIR_PATH)), ignore_errors=True)
    res.append(etree.tostring(root))
    return res
def request_body():
    """Provide a file-like body containing the canned payload 'TESTS'."""
    payload = 'TESTS'
    return StringIO(payload)
def xml_to_json(self, xml_data):
    """
    The method translates XML data to JSON data encoded as per
    YANG model (RFC 7951)
    :param xml_data: XML data or file path containing xml data that
        should to translated to JSON
    :return: data in JSON format.
    :raises AnsibleError: on invalid XML or any pyang/xsltproc failure.
    """
    plugindir = unfrackpath(XM2JSONL_DIR_PATH)
    makedirs_safe(plugindir)

    if os.path.isfile(xml_data):
        # input is xml file path
        xml_file_path = os.path.realpath(os.path.expanduser(xml_data))
    else:
        # input is xml string, copy it to file in temporary location
        xml_file_path = os.path.join(XM2JSONL_DIR_PATH,
                                     "%s.%s" % (str(uuid.uuid4()), "xml"))
        xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path))
        with open(xml_file_path, "w") as f:
            if not xml_data.startswith("<?xml version"):
                xml_data = ('<?xml version="1.0" encoding="UTF-8"?>\n' +
                            xml_data)
            data = xml_data
            f.write(data)

    xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path))

    try:
        # validate xml
        etree.parse(xml_file_path)
        display.vvvv("Parsing xml data from temporary file: %s" %
                     xml_file_path)
    except Exception as exc:
        if not self._keep_tmp_files:
            shutil.rmtree(
                os.path.realpath(os.path.expanduser(XM2JSONL_DIR_PATH)),
                ignore_errors=True,
            )
        raise AnsibleError("Failed to load xml data: %s" %
                           (to_text(exc, errors="surrogate_or_strict")))

    # Run pyang in-process: load it as a script module and fake its CLI
    # environment (argv plus captured stdout/stderr); restored below.
    base_pyang_path = sys.modules["pyang"].__file__
    pyang_exec_path = find_file_in_path("pyang")
    pyang_exec = imp.load_source("pyang", pyang_exec_path)
    saved_arg = deepcopy(sys.argv)
    sys.modules["pyang"].__file__ = base_pyang_path
    saved_stdout = sys.stdout
    saved_stderr = sys.stderr
    sys.stdout = sys.stderr = StringIO()

    xsl_file_path = os.path.join(XM2JSONL_DIR_PATH,
                                 "%s.%s" % (str(uuid.uuid4()), "xsl"))
    json_file_path = os.path.join(XM2JSONL_DIR_PATH,
                                  "%s.%s" % (str(uuid.uuid4()), "json"))
    xls_file_path = os.path.realpath(os.path.expanduser(xsl_file_path))
    json_file_path = os.path.realpath(os.path.expanduser(json_file_path))

    # fill in the sys args before invoking pyang
    sys.argv = [
        pyang_exec_path,
        "-f",
        "jsonxsl",
        "-o",
        xls_file_path,
        "-p",
        self._search_path,
        "--lax-quote-checks",
    ] + self._yang_files
    display.display(
        "Generating xsl file '%s' by executing command '%s'"
        % (xls_file_path, " ".join(sys.argv)),
        log_only=True,
    )
    try:
        pyang_exec.run()
    except SystemExit:
        # pyang exits via SystemExit even on success; swallow it.
        pass
    except Exception as e:
        if not self._keep_tmp_files:
            shutil.rmtree(
                os.path.realpath(os.path.expanduser(XM2JSONL_DIR_PATH)),
                ignore_errors=True,
            )
        raise AnsibleError(
            "Error while generating intermediate (xsl) file: %s" % e)
    finally:
        # pyang reports errors on the captured stderr rather than raising.
        err = sys.stderr.getvalue()
        if err and "error" in err.lower():
            if not self._keep_tmp_files:
                shutil.rmtree(
                    os.path.realpath(
                        os.path.expanduser(XM2JSONL_DIR_PATH)),
                    ignore_errors=True,
                )
            raise AnsibleError(
                "Error while generating (xsl) intermediate file: %s" % err)

    xsltproc_exec_path = find_file_in_path("xsltproc")

    # fill in the sys args before invoking xsltproc
    sys.argv = [
        xsltproc_exec_path,
        "-o",
        json_file_path,
        xsl_file_path,
        xml_file_path,
    ]
    display.display(
        "Generating json data in temp file '%s' by executing command '%s'"
        % (json_file_path, " ".join(sys.argv)),
        log_only=True,
    )
    time.sleep(5)  # NOTE(review): fixed delay before xsltproc — purpose not evident here
    try:
        os.system(" ".join(sys.argv))
    except SystemExit:
        pass
    finally:
        err = sys.stderr.getvalue()
        if err and "error" in err.lower():
            if not self._keep_tmp_files:
                shutil.rmtree(
                    os.path.realpath(
                        os.path.expanduser(XM2JSONL_DIR_PATH)),
                    ignore_errors=True,
                )
            raise AnsibleError("Error while translating to json: %s" % err)

    # Restore the process-global state hijacked above.
    sys.argv = saved_arg
    sys.stdout = saved_stdout
    sys.stderr = saved_stderr

    try:
        display.vvvv("Reading output json data from temporary file: %s" %
                     json_file_path)
        with open(json_file_path, "r") as fp:
            raw_content = fp.read()
        content = json.loads(raw_content)
    except Exception as e:
        raise AnsibleError(
            "Error while reading json document %s with content %s" %
            (e, raw_content))
    finally:
        if not self._keep_tmp_files:
            shutil.rmtree(
                os.path.realpath(os.path.expanduser(XM2JSONL_DIR_PATH)),
                ignore_errors=True,
            )
    return content
def xml_to_json(self, xml_data, tmp_dir_path):
    """
    The method translates XML data to JSON data encoded as per
    YANG model (RFC 7951)
    :param xml_data: XML data or file path containing xml data that
        should to translated to JSON
    :param tmp_dir_path: Temporary directory path to copy intermediate
        files
    :return: data in JSON format.
    :raises ValueError: on invalid input or any pyang/xsltproc failure.
    """
    # EAFP: first assume `xml_data` is an XML string; fall back to
    # treating it as a file path on a parse error.
    try:
        etree.fromstring(xml_data)
        # input is xml string, copy it to file in temporary location
        xml_file_path = os.path.join(tmp_dir_path,
                                     "%s.%s" % (str(uuid.uuid4()), "xml"))
        xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path))
        with open(xml_file_path, "w") as f:
            if not xml_data.startswith("<?xml version"):
                xml_data = ('<?xml version="1.0" encoding="UTF-8"?>\n' +
                            xml_data)
            data = xml_data
            f.write(data)
    except etree.XMLSyntaxError:
        if os.path.isfile(xml_data):
            # input is xml file path
            xml_file_path = os.path.realpath(os.path.expanduser(xml_data))
        else:
            if not self._keep_tmp_files:
                shutil.rmtree(
                    os.path.realpath(os.path.expanduser(tmp_dir_path)),
                    ignore_errors=True,
                )
            raise ValueError("Unable to create file or read XML data %s" %
                             xml_data)

    xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path))
    # NOTE(review): validation only runs when the input was a file path;
    # string input was already parsed by etree.fromstring above.
    if os.path.isfile(xml_data):
        try:
            # validate xml
            etree.parse(xml_file_path)
            if self._debug:
                self._debug("Parsing xml data from temporary file: %s" %
                            xml_file_path)
        except Exception as exc:
            if not self._keep_tmp_files:
                shutil.rmtree(
                    os.path.realpath(os.path.expanduser(tmp_dir_path)),
                    ignore_errors=True,
                )
            raise ValueError("Failed to load xml data: %s" %
                             (to_text(exc, errors="surrogate_or_strict")))

    # Run pyang in-process with a faked CLI environment (argv plus
    # captured stdout/stderr); restored below.
    base_pyang_path = sys.modules["pyang"].__file__
    pyang_exec_path = find_file_in_path("pyang")

    saved_arg = deepcopy(sys.argv)
    sys.modules["pyang"].__file__ = base_pyang_path

    saved_stdout = sys.stdout
    saved_stderr = sys.stderr
    sys.stdout = sys.stderr = StringIO()

    # Locate pyang's jsonxsl templates and point PYANG_XSLT_DIR at them.
    jsonxsl_relative_dirpath = os.path.join("yang", "xslt")
    jsonxsl_dir_path = find_share_path(
        os.path.join(jsonxsl_relative_dirpath, "jsonxsl-templates.xsl"))
    if jsonxsl_dir_path is None:
        raise ValueError(
            "Could not find jsonxsl-templates.xsl in environment path")
    os.environ["PYANG_XSLT_DIR"] = os.path.join(jsonxsl_dir_path,
                                                jsonxsl_relative_dirpath)

    xsl_file_path = os.path.join(tmp_dir_path,
                                 "%s.%s" % (str(uuid.uuid4()), "xsl"))
    json_file_path = os.path.join(tmp_dir_path,
                                  "%s.%s" % (str(uuid.uuid4()), "json"))
    xls_file_path = os.path.realpath(os.path.expanduser(xsl_file_path))
    json_file_path = os.path.realpath(os.path.expanduser(json_file_path))

    # fill in the sys args before invoking pyang
    sys.argv = [
        pyang_exec_path,
        "-f",
        "jsonxsl",
        "-o",
        xls_file_path,
        "-p",
        self._search_path,
        "--lax-quote-checks",
    ] + self._yang_files
    if self._debug:
        self._debug("Generating xsl file '%s' by executing command '%s'"
                    % (xls_file_path, " ".join(sys.argv)))
    try:
        self._pyang_module.run()
    except SystemExit:
        # pyang exits via SystemExit even on success; swallow it.
        pass
    except Exception as e:
        if not self._keep_tmp_files:
            shutil.rmtree(
                os.path.realpath(os.path.expanduser(tmp_dir_path)),
                ignore_errors=True,
            )
        raise ValueError(
            "Error while generating intermediate (xsl) file: %s" % e)
    finally:
        # pyang reports errors on the captured stderr rather than raising.
        err = sys.stderr.getvalue()
        if err and "error" in err.lower():
            if not self._keep_tmp_files:
                shutil.rmtree(
                    os.path.realpath(os.path.expanduser(tmp_dir_path)),
                    ignore_errors=True,
                )
            raise ValueError(
                "Error while generating (xsl) intermediate file: %s" % err)

    xsltproc_exec_path = find_file_in_path("xsltproc")
    if not xsltproc_exec_path:
        raise ValueError(
            "xsltproc executable not found."
            " Install 'libxml2-dev' and 'libxslt-dev' packages")

    # fill in the sys args before invoking xsltproc
    sys.argv = [
        xsltproc_exec_path,
        "-o",
        json_file_path,
        xsl_file_path,
        xml_file_path,
    ]
    if self._debug:
        self._debug(
            "Generating json data in temp file '%s' by executing command '%s'"
            % (json_file_path, " ".join(sys.argv)))
    time.sleep(5)  # NOTE(review): fixed delay before xsltproc — purpose not evident here
    try:
        os.system(" ".join(sys.argv))
    except SystemExit:
        pass
    finally:
        err = sys.stderr.getvalue()
        if err and "error" in err.lower():
            if not self._keep_tmp_files:
                shutil.rmtree(
                    os.path.realpath(os.path.expanduser(tmp_dir_path)),
                    ignore_errors=True,
                )
            raise ValueError("Error while translating to json: %s" % err)

    # Restore the process-global state hijacked above.
    sys.argv = saved_arg
    sys.stdout = saved_stdout
    sys.stderr = saved_stderr

    try:
        if self._debug:
            self._debug(
                "Reading output json data from temporary file: %s" %
                json_file_path)
        with open(json_file_path, "r") as fp:
            raw_content = fp.read()
        content = json.loads(raw_content)
    except Exception as e:
        raise ValueError(
            "Error while reading json document %s from path %s" %
            (e, json_file_path))
    finally:
        if not self._keep_tmp_files:
            shutil.rmtree(
                os.path.realpath(os.path.expanduser(tmp_dir_path)),
                ignore_errors=True,
            )
    return content
def run(self, terms, variables, **kwargs):
    """Generate tree, XML-skeleton and JSON-skeleton views of a YANG model.

    :param terms: lookup terms; terms[0] is the path to the YANG file.
    :param variables: ansible variables (unused here).
    :kwarg search_path: colon-separated directories searched for YANG imports.
    :kwarg annotations: if truthy, add annotation comments to the XML skeleton.
    :kwarg keep_tmp_files: if True, keep intermediate files for debugging.
    :kwarg defaults: if True, include default values in the skeletons.
    :kwarg doctype: document type for the skeletons, 'config' or 'data'.
    :returns: one-element list holding a dict with keys 'tree',
        'xml_skeleton' and 'json_skeleton'.
    :raises AnsibleError: on invalid arguments or any pyang failure.
    """
    res = []
    output = {}
    try:
        yang_file = terms[0]
    except IndexError:
        raise AnsibleError('the yang file must be specified')

    yang_file = os.path.realpath(os.path.expanduser(yang_file))
    if not os.path.isfile(yang_file):
        raise AnsibleError('%s invalid file path' % yang_file)

    search_path = kwargs.pop('search_path', '')
    annotations = kwargs.pop('annotations', '')

    for path in search_path.split(':'):
        path = os.path.realpath(os.path.expanduser(path))
        # BUGFIX: was `path is not ''` -- identity comparison against a str
        # literal is unreliable (and a SyntaxWarning); use truthiness.
        if path and not os.path.isdir(path):
            raise AnsibleError('%s is invalid directory path' % path)

    keep_tmp_files = kwargs.pop('keep_tmp_files', False)
    defaults = kwargs.pop('defaults', False)
    doctype = kwargs.pop('doctype', 'config')

    valid_doctype = ['config', 'data']
    if doctype not in valid_doctype:
        # BUGFIX: the message previously interpolated `path` instead of the
        # offending `doctype` value, and misspelled "doctype".
        raise AnsibleError('doctype value %s is invalid, valid values are %s'
                           % (doctype, ', '.join(valid_doctype)))

    pyang_exec_path = find_file_in_path('pyang')
    # Consistency with the xsltproc lookup elsewhere in this file: fail with
    # a clear message instead of a TypeError when pyang is missing.
    if not pyang_exec_path:
        raise AnsibleError('pyang executable not found in environment path')

    saved_arg = deepcopy(sys.argv)
    saved_stdout, saved_stderr = sys.stdout, sys.stderr
    sys.stdout = sys.stderr = StringIO()

    def _cleanup_tmp():
        # Best-effort removal of the scratch directory unless the caller
        # asked to keep intermediate files.
        if not keep_tmp_files:
            shutil.rmtree(os.path.realpath(os.path.expanduser(YANG_SPEC_DIR_PATH)),
                          ignore_errors=True)

    def _run_pyang(cmd, what):
        # Run one pyang invocation; translate failures and any 'error' text
        # found on the redirected stdout into AnsibleError.
        try:
            # SECURITY/BUGFIX: pass the argument list directly instead of
            # joining it into a shell string with shell=True; paths with
            # spaces or shell metacharacters are now handled safely.
            subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        except SystemExit:
            pass
        except Exception as e:
            _cleanup_tmp()
            raise AnsibleError('Error while generating %s: %s' % (what, e))
        finally:
            err = sys.stdout.getvalue()
            if err and 'error' in err.lower():
                _cleanup_tmp()
                raise AnsibleError('Error while generating %s: %s' % (what, err))
        sys.stdout.flush()
        sys.stderr.flush()

    try:
        plugindir = unfrackpath(YANG_SPEC_DIR_PATH)
        makedirs_safe(plugindir)

        tree_file_path = os.path.join(YANG_SPEC_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'txt'))
        xml_file_path = os.path.join(YANG_SPEC_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'xml'))
        json_file_path = os.path.join(YANG_SPEC_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'json'))

        tree_file_path = os.path.realpath(os.path.expanduser(tree_file_path))
        xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path))
        json_file_path = os.path.realpath(os.path.expanduser(json_file_path))

        # 1. XML skeleton of the model.
        sample_xml_skeleton_cmd = [pyang_exec_path, '-f', 'sample-xml-skeleton',
                                   '-o', xml_file_path, yang_file, '-p', search_path,
                                   '--sample-xml-skeleton-doctype', doctype,
                                   '--lax-quote-checks']
        if defaults:
            sample_xml_skeleton_cmd.append('--sample-xml-skeleton-defaults')
        if annotations:
            sample_xml_skeleton_cmd.append('--sample-xml-skeleton-annotations')
        _run_pyang(sample_xml_skeleton_cmd, 'skeleton xml file')

        # 2. Tree representation of the model.
        tree_cmd = [pyang_exec_path, '-f', 'tree', '-o', tree_file_path,
                    yang_file, '-p', search_path, '--lax-quote-checks']
        _run_pyang(tree_cmd, 'tree file')

        # The sample-json-skeleton pyang plugin ships with this lookup; copy
        # it into the plugin dir so pyang can load it via --plugindir.
        plugin_file_src = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'yang_spec.py')
        shutil.copy(plugin_file_src, plugindir)

        # 3. JSON skeleton of the model.
        sample_json_skeleton_cmd = [pyang_exec_path, '--plugindir', plugindir,
                                    '-f', 'sample-json-skeleton',
                                    '-o', json_file_path, yang_file, '-p', search_path,
                                    '--lax-quote-checks',
                                    '--sample-json-skeleton-doctype', doctype]
        if defaults:
            sample_json_skeleton_cmd.append('--sample-json-skeleton-defaults')
        _run_pyang(sample_json_skeleton_cmd, 'skeleton json file')

        with open(tree_file_path, 'r') as f:
            output['tree'] = f.read()

        with open(xml_file_path, 'r') as f:
            output['xml_skeleton'] = f.read()

        with open(json_file_path, 'r') as f:
            output['json_skeleton'] = json.load(f)

        if not keep_tmp_files:
            shutil.rmtree(plugindir, ignore_errors=True)
        res.append(output)
    finally:
        # BUGFIX: restore argv AND stdout/stderr on every exit path -- the
        # previous version leaked the StringIO redirect (and argv) whenever
        # an error was raised.
        sys.argv = saved_arg
        sys.stdout = saved_stdout
        sys.stderr = saved_stderr

    return res
def csv_to_list(rawcsv):
    """Parse a raw CSV string into a list of dicts, one per data row.

    Column names come from the CSV header row; every cell value is
    stripped of surrounding whitespace.
    """
    return [
        {field: cell.strip() for field, cell in record.items()}
        for record in csv.DictReader(StringIO(rawcsv))
    ]
    The configuration returned will always be in the same format
    of the parameters above.
after:
  description: The resulting configuration model invocation.
  returned: when changed
  sample: >
    The configuration returned will always be in the same format
    of the parameters above.
commands:
  description: The set of commands pushed to the remote device.
  returned: always
  type: list
  sample: ['command 1', 'command 2', 'command 3']
"""

# Shared in-memory buffer that add() appends generated configuration lines to.
output = StringIO()

# Scratch directory used for resource-model files.
RM_DIR_PATH = "~/.ansible/tmp/resource_model"


def to_list(val):
    # Normalize any value into a list: sequences are converted, a non-None
    # scalar is wrapped in a single-element list, and None becomes [].
    if isinstance(val, (list, tuple, set)):
        return list(val)
    elif val is not None:
        return [val]
    return list()


def add(line, spaces=0, newline=True):
    # Left-pad `line` with `spaces` spaces (rjust to len+spaces), then append
    # it to the module-level `output` buffer with a trailing newline.
    # NOTE(review): when newline=False nothing is written at all (the padded
    # line is discarded) -- confirm this is intended rather than a missing
    # `output.write(line)` branch.
    line = line.rjust(len(line) + spaces, ' ')
    if newline:
        output.write(line + '\n')
def _raise_401():
    # Test helper: raise a synthetic HTTP 401 (Unauthorized) error for the
    # `url` captured from the enclosing scope, with an empty response body.
    # NOTE(review): `hdrs` is passed as '' rather than a headers/Message
    # object -- acceptable for tests that never inspect headers; confirm.
    raise HTTPError(url=url, code=401, msg='Unauthorized', hdrs='', fp=StringIO(''))
def generate_json_schema(self, schema_out_path=None, defaults=False):
    """
    This method generates json schema by parsing the yang file
    and stores the content of json schema into a file (optional)
    :param schema_out_path: This option provide the file path to store the
           generated json schema.
    :param defaults: If set to True the default values will be added in
           json schema from the YANG model for the corresponding option.
    :return: JSON schema in string format.
    :raises ValueError: if pyang fails or reports an error.
    """
    saved_arg = deepcopy(sys.argv)
    saved_stdout, saved_stderr = sys.stdout, sys.stderr
    # Redirect stdio so pyang's in-process diagnostics can be inspected below.
    sys.stdout = sys.stderr = StringIO()

    try:
        json_tmp_file_path = os.path.join(
            self._tmp_dir_path, "%s.%s" % (str(uuid.uuid4()), "json"))
        json_tmp_file_path = os.path.realpath(
            os.path.expanduser(json_tmp_file_path))

        # The sample-json-skeleton pyang plugin is bundled with this package;
        # copy it into the temp dir so pyang can load it via --plugindir.
        plugin_file_src = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "../pyang/plugins/json_skeleton_plugin.py",
        )
        shutil.copy(plugin_file_src, self._tmp_dir_path)

        # fill in the args before invoking pyang to retrieve json skeleton
        sample_json_skeleton_cmd = [
            self._pyang_exec_path,
            "--plugindir",
            self._tmp_dir_path,
            "-f",
            "sample-json-skeleton",
            "-o",
            json_tmp_file_path,
            "-p",
            self._search_path,
            "--lax-quote-checks",
            "--sample-json-skeleton-doctype",
            self._doctype,
        ] + self._yang_file_path
        if defaults:
            sample_json_skeleton_cmd.append("--sample-json-skeleton-defaults")

        try:
            # SECURITY/BUGFIX: pass the argument list directly instead of
            # joining into a shell string with shell=True; paths containing
            # spaces or shell metacharacters are now handled safely.
            subprocess.check_output(sample_json_skeleton_cmd,
                                    stderr=subprocess.STDOUT)
        except SystemExit:
            pass
        except Exception as e:
            if not self._keep_tmp_files:
                shutil.rmtree(
                    os.path.realpath(os.path.expanduser(self._tmp_dir_path)),
                    ignore_errors=True,
                )
            # BUGFIX: only CalledProcessError carries `.output`; a generic
            # exception previously triggered an AttributeError here.
            raise ValueError("Error while generating skeleton json file: %s"
                             % getattr(e, "output", e))
        finally:
            err = sys.stdout.getvalue()
            if err and "error" in err.lower():
                if not self._keep_tmp_files:
                    shutil.rmtree(
                        os.path.realpath(os.path.expanduser(
                            self._tmp_dir_path)),
                        ignore_errors=True,
                    )
                raise ValueError("Error while generating json schema: %s" % err)
    finally:
        # BUGFIX: restore argv AND stdout/stderr on every exit path -- the
        # previous version leaked the StringIO redirect when a ValueError
        # was raised above.
        sys.argv = saved_arg
        sys.stdout = saved_stdout
        sys.stderr = saved_stderr

    with open(json_tmp_file_path, "r") as f:
        json_schema = json.load(f)

    if schema_out_path:
        try:
            shutil.copy(json_tmp_file_path, schema_out_path)
        except IOError as e:
            # ENOENT(2): file does not exist, raised also on missing dest parent dir
            if e.errno != errno.ENOENT:
                raise
            # try creating parent directories
            os.makedirs(os.path.dirname(schema_out_path))
            shutil.copyfile(json_tmp_file_path, schema_out_path)

    if not self._keep_tmp_files:
        os.remove(json_tmp_file_path)
    return json_schema
def generate_xml_schema(self, schema_out_path=None, defaults=False, annotations=False):
    """
    This method generates XML schema by parsing the yang file
    and stores the content of XML schema into a file (optional)
    :param schema_out_path: This option provide the file path to store the
           generated XML schema.
    :param defaults: If set to True the default values will be added in
           XML schema from the YANG model for the corresponding option.
    :param annotations: The boolean flag identifies if the XML skeleton
           should have comments describing the field or not.
    :return: XML schema in string format.
    :raises ValueError: if pyang fails or reports an error.
    """
    saved_arg = deepcopy(sys.argv)
    saved_stdout, saved_stderr = sys.stdout, sys.stderr
    # Redirect stdio so pyang's in-process diagnostics can be inspected below.
    sys.stdout = sys.stderr = StringIO()

    try:
        xml_tmp_file_path = os.path.join(self._tmp_dir_path,
                                         "%s.%s" % (str(uuid.uuid4()), "xml"))
        xml_tmp_file_path = os.path.realpath(
            os.path.expanduser(xml_tmp_file_path))

        # fill in the args before invoking pyang to retrieve xml skeleton
        sample_xml_skeleton_cmd = [
            self._pyang_exec_path,
            "-f",
            "sample-xml-skeleton",
            "-o",
            xml_tmp_file_path,
            "-p",
            self._search_path,
            "--sample-xml-skeleton-doctype",
            self._doctype,
            "--lax-quote-checks",
        ] + self._yang_file_path
        if defaults:
            sample_xml_skeleton_cmd.append("--sample-xml-skeleton-defaults")
        if annotations:
            sample_xml_skeleton_cmd.append("--sample-xml-skeleton-annotations")

        try:
            # SECURITY/BUGFIX: pass the argument list directly instead of
            # joining into a shell string with shell=True; paths containing
            # spaces or shell metacharacters are now handled safely.
            subprocess.check_output(sample_xml_skeleton_cmd,
                                    stderr=subprocess.STDOUT)
        except SystemExit:
            pass
        except Exception as e:
            if not self._keep_tmp_files:
                shutil.rmtree(
                    os.path.realpath(os.path.expanduser(self._tmp_dir_path)),
                    ignore_errors=True,
                )
            # BUGFIX: only CalledProcessError carries `.output`; a generic
            # exception previously triggered an AttributeError here.
            raise ValueError("Error while generating skeleton xml file: %s"
                             % getattr(e, "output", e))
        finally:
            err = sys.stdout.getvalue()
            if err and "error" in err.lower():
                if not self._keep_tmp_files:
                    shutil.rmtree(
                        os.path.realpath(os.path.expanduser(
                            self._tmp_dir_path)),
                        ignore_errors=True,
                    )
                raise ValueError(
                    "Error while generating skeleton xml file: %s" % err)
    finally:
        # BUGFIX: restore argv AND stdout/stderr on every exit path -- the
        # previous version leaked the StringIO redirect when a ValueError
        # was raised above.
        sys.argv = saved_arg
        sys.stdout = saved_stdout
        sys.stderr = saved_stderr

    with open(xml_tmp_file_path, "r") as f:
        xml_schema = f.read()

    if schema_out_path:
        try:
            shutil.copy(xml_tmp_file_path, schema_out_path)
        except IOError as e:
            # ENOENT(2): file does not exist, raised also on missing dest parent dir
            if e.errno != errno.ENOENT:
                raise
            # try creating parent directories
            os.makedirs(os.path.dirname(schema_out_path))
            shutil.copyfile(xml_tmp_file_path, schema_out_path)

    if not self._keep_tmp_files:
        os.remove(xml_tmp_file_path)
    return xml_schema