def fake_getresultstree(sself, taskspec):
    """Fake getresultstree. Change state of last recipe on 3rd loop.

    Args:
        sself:    BeakerRunner
        taskspec: ID of the job, recipe or recipe set.
    Returns:
        xml root
    """
    # `alt_state`, `xml_asset_file`, `get_asset_content` and `fromstring` are
    # names from the enclosing scope; `fake_getresultstree.run_count` is a
    # function attribute used as a call counter across invocations.
    if alt_state:
        if fake_getresultstree.run_count > 2:
            # After enough calls, flip the status of the last recipe so the
            # caller observes a state change.
            result = fromstring(get_asset_content(xml_asset_file))
            recipe = result.findall('.//recipe')[-1]
            recipe.attrib['status'] = alt_state
            sself.recipe_set_results[taskspec] = result
            return result
        # NOTE(review): increment placement reconstructed from flattened
        # source — assumed to run only while alt_state is set; confirm.
        fake_getresultstree.run_count += 1
    # Default path: return the asset XML unmodified and cache it.
    result = fromstring(get_asset_content(xml_asset_file))
    sself.recipe_set_results[taskspec] = result
    return result
def _add_comment(self, dom, commentdata):
    """Submit a comment through the Seaside web form found in *dom*.

    Args:
        dom:         parsed XHTML document containing the "add comment" link.
        commentdata: object with `.name` and `.text` attributes to post.
    Returns:
        OK on success, NOTWORKING if any HTTP request does not return 200.
    """
    # Follow the first anchor in the page body to reach the comment form.
    addcomment = dom.find('.//h:body/h:a', _NS)
    href = addcomment.attrib['href']
    rawresponse = self._session.get(href)
    if rawresponse.status_code != 200:
        return NOTWORKING
    dom = fromstring(rawresponse.content)
    form = dom.find('.//h:form', _NS)
    data = dict()
    # Carry over all hidden form fields (session/continuation state).
    hidden = form.findall('.//h:input[@type="hidden"]', _NS)
    for hid in hidden:
        data[hid.attrib['name']] = hid.attrib.get('value', '')
    # Field names are Seaside's auto-generated numeric ids:
    # '5' = author name, '6' = comment text, '7' = submit button.
    data['5'] = commentdata.name
    data['6'] = commentdata.text
    data['7'] = 'Create'
    rawresponse = self._session.post('http://%s/seaside/doodle' % self._ip, data=data)
    if rawresponse.status_code != 200:
        return NOTWORKING
    # Parse the response to ensure it is well-formed before reporting OK.
    dom = fromstring(rawresponse.content)
    return OK
def is_safe_xml(text):
    """Return True when *text* parses as XML with DTDs, entity
    definitions and external references all forbidden; False (after
    printing the parse error) otherwise."""
    try:
        fromstring(
            text,
            forbid_dtd=True,
            forbid_entities=True,
            forbid_external=True,
        )
    except Exception as err:
        print(err)
        return False
    else:
        return True
def get_chat_status_and_css(name):
    """Get the chat status and css for Ask a Librarian pages.

    Args:
        name: string, the name of the chat widget you wish to retrieve.
            Possible values include: uofc-ask, law, crerar, and ssa.

    Returns:
        Tuple representing the chat status for Ask a Librarian pages
        where the first item is a boolean and the second item is a
        string (css class).
    """
    presence_url = 'https://us.libraryh3lp.com/presence/jid/' + name + '/chat.libraryh3lp.com/xml'
    try:
        resp = requests.get(presence_url, timeout=12)
        presence = fromstring(resp.content)
    except requests.exceptions.Timeout:
        # On timeout, substitute a canned "unavailable" presence document.
        offline_xml = "<presence user='******' server='chat.libraryh3lp.com'><resource show='unavailable' name='libraryh3lp' priority='5'/></presence>"
        presence = fromstring(offline_xml)
    is_available = presence.find('resource').attrib['show'] == 'available'
    return (is_available, 'active' if is_available else 'off')
def to_xml(text):
    """Parse *text* into an XML element tree, tolerating faulty XML.

    Strips a leading BOM if present, then tries a strict parse. On
    ParseError, falls back to a recovering lxml parse and re-parses the
    cleaned output; raises ParseError (with an excerpt of the offending
    line when locatable) if even that fails.
    """
    try:
        if PY2:
            # On python2, fromstring expects an encoded string
            return fromstring((text[BOM_LEN:] if text.startswith(BOM) else text).encode('utf-8'))
        return fromstring(text[BOM_LEN:] if text.startswith(BOM) else text)
    except ParseError:
        # Exchange servers may spit out the weirdest XML. lxml is pretty good at recovering from errors
        log.warning('Fallback to lxml processing of faulty XML')
        magical_parser = XMLParser(recover=True, resolve_entities=False)
        magical_parser.set_element_class_lookup(ElementDefaultClassLookup(element=RestrictedElement))
        no_bom_text = text[BOM_LEN:] if text.startswith(BOM) else text
        try:
            root = parse(io.BytesIO(no_bom_text.encode('utf-8')), parser=magical_parser)
        except AssertionError as e:
            # lxml signals some internal failures via AssertionError;
            # normalize to ParseError for callers.
            raise ParseError(*e.args)
        try:
            # Round-trip the recovered tree through a strict parse so the
            # caller always receives a clean element.
            return fromstring(tostring(root))
        except ParseError as e:
            if hasattr(e, 'position'):
                e.lineno, e.offset = e.position
            if not e.lineno:
                raise ParseError('%s' % text_type(e))
            try:
                offending_line = no_bom_text.splitlines()[e.lineno - 1]
            except IndexError:
                raise ParseError('%s' % text_type(e))
            else:
                # Include +/-20 chars of context around the error offset.
                offending_excerpt = offending_line[max(0, e.offset - 20):e.offset + 20]
                raise ParseError('%s\nOffending text: [...]%s[...]' % (text_type(e), offending_excerpt))
    except TypeError:
        raise ParseError('This is not XML: %s' % text)
def test_get_changeset_list(self):
    """Changeset list endpoint: unfiltered list contains both changesets;
    ?open=true filters to open ones, ?closed=true to closed ones."""
    teststr = u"Съешь же ещё этих мягких французских булок да выпей чаю"
    # One open changeset (cs) and one closed changeset (cs2).
    cs = CreateTestChangeset(self.user, tags={"foo": "bar", 'test': teststr},
                             bbox=(-1.0893202, 50.7942715, -1.0803509, 50.7989372),
                             is_open=True, open_timestamp=int(time.time()) - 60)
    cs2 = CreateTestChangeset(self.user, tags={"foo": "bar", 'test': teststr},
                              bbox=(-1.0893202, 50.7942715, -1.0803509, 50.7989372),
                              is_open=False, open_timestamp=int(time.time()) - 120)
    anonClient = Client()
    # Unfiltered: both changesets must be listed.
    response = anonClient.get(reverse('changeset:list'))
    self.assertEqual(response.status_code, 200)
    xml = fromstring(response.content)
    CheckChangesetListContainsId(self, xml, cs.objId, True)
    CheckChangesetListContainsId(self, xml, cs2.objId, True)
    # open=true: only the open changeset.
    response = anonClient.get(reverse('changeset:list') + "?open=true")
    self.assertEqual(response.status_code, 200)
    xml = fromstring(response.content)
    CheckChangesetListContainsId(self, xml, cs.objId, True)
    CheckChangesetListContainsId(self, xml, cs2.objId, False)
    # closed=true: only the closed changeset.
    response = anonClient.get(reverse('changeset:list') + "?closed=true")
    self.assertEqual(response.status_code, 200)
    xml = fromstring(response.content)
    CheckChangesetListContainsId(self, xml, cs.objId, False)
    CheckChangesetListContainsId(self, xml, cs2.objId, True)
def __blacklist_hreq(self, host_requires):
    """Make sure recipe excludes blacklisted hosts.

    Args:
        host_requires: etree node representing "hostRequires" node from
                       the recipe.
    Returns:
        Modified "hostRequires" etree node.
    """
    if host_requires.get('force'):
        # A forced host wins; skip the blacklist entirely.
        return host_requires

    and_node = host_requires.find('and')
    if and_node is None:
        and_node = fromstring('<and />')
        host_requires.append(and_node)

    bad_entry_logged = False
    for excluded in self.blacklisted:
        try:
            exclusion = fromstring(f'<hostname op="!=" value="{excluded}" '
                                   f'/>')
            and_node.append(exclusion)
        except ParseError:
            # Only plaintext values like "host1" are accepted; entries
            # containing html/xml markup break parsing and are skipped.
            # Log the problem once rather than once per bad entry.
            if not bad_entry_logged:
                logging.info('The blacklist or a part of it is invalid!')
                bad_entry_logged = True
    return host_requires
def test_changeset_auto_close_active(self):
    """closeoldchangesets: recently-opened changesets stay open, stale
    open changesets (36h old) get auto-closed, closed ones stay closed."""
    teststr = u"Съешь же ещё этих мягких французских булок да выпей чаю"
    # cs: open, recent — must remain open after the command runs.
    cs = CreateTestChangeset(self.user, tags={"foo": "bar", 'test': teststr},
                             bbox=(-1.0893202, 50.7942715, -1.0803509, 50.7989372),
                             is_open=True, open_timestamp=int(time.time()) - 60)
    # cs2: already closed, recent.
    cs2 = CreateTestChangeset(self.user, tags={"foo": "bar", 'test': teststr},
                              bbox=(-1.0893202, 50.7942715, -1.0803509, 50.7989372),
                              is_open=False, open_timestamp=int(time.time()) - 120)
    # cs3: open but 36 hours old — should be auto-closed.
    cs3 = CreateTestChangeset(self.user, tags={"foo": "bar", 'test': teststr},
                              bbox=(-1.0893202, 50.7942715, -1.0803509, 50.7989372),
                              is_open=True, open_timestamp=int(time.time()) - (36 * 60 * 60))
    # cs4: closed and 36 hours old.
    cs4 = CreateTestChangeset(self.user, tags={"foo": "bar", 'test': teststr},
                              bbox=(-1.0893202, 50.7942715, -1.0803509, 50.7989372),
                              is_open=False, open_timestamp=int(time.time()) - (36 * 60 * 60))
    # Run the management command that closes stale changesets.
    cmd = closeoldchangesets.Command()
    cmd.handle([], {})
    anonClient = Client()
    # cs: still open.
    response = anonClient.get(reverse('changeset:changeset', args=(cs.objId,)))
    self.assertEqual(response.status_code, 200)
    xml = fromstring(response.content)
    self.assertEqual(xml.tag, "osm")
    csout = xml.find("changeset")
    self.assertEqual(int(csout.attrib["id"]) == cs.objId, True)
    self.assertEqual(csout.attrib["open"], "true")
    # cs2: still closed.
    response = anonClient.get(reverse('changeset:changeset', args=(cs2.objId,)))
    self.assertEqual(response.status_code, 200)
    xml = fromstring(response.content)
    self.assertEqual(xml.tag, "osm")
    csout = xml.find("changeset")
    self.assertEqual(int(csout.attrib["id"]) == cs2.objId, True)
    self.assertEqual(csout.attrib["open"], "false")
    # cs3: was open, now auto-closed by the command.
    response = anonClient.get(reverse('changeset:changeset', args=(cs3.objId,)))
    self.assertEqual(response.status_code, 200)
    xml = fromstring(response.content)
    self.assertEqual(xml.tag, "osm")
    csout = xml.find("changeset")
    self.assertEqual(int(csout.attrib["id"]) == cs3.objId, True)
    self.assertEqual(csout.attrib["open"], "false")
    # cs4: still closed.
    response = anonClient.get(reverse('changeset:changeset', args=(cs4.objId,)))
    self.assertEqual(response.status_code, 200)
    xml = fromstring(response.content)
    self.assertEqual(xml.tag, "osm")
    csout = xml.find("changeset")
    self.assertEqual(int(csout.attrib["id"]) == cs4.objId, True)
    self.assertEqual(csout.attrib["open"], "false")
def add_asset_grp(self, access_req):
    """Create a Qualys asset group covering the requested IPs.

    Looks up the scanner appliance id, then issues an asset-group "add"
    request against the Qualys API. Returns True on success, False when
    no scanner id is available or the API reports a failure.
    """
    scanner_id = self.get_scanners()
    self.url = self.qualys_host + "/api/2.0/fo/asset/group/"
    if scanner_id is None:
        # Without a scanner appliance there is nothing to attach the group to.
        Utilities.printError(
            "Asset Group adition Failure: Scanner id not found")
        return False
    params = {
        'action': 'add',
        'ips': access_req['ip'],
        'title': access_req['site_name'],
        'appliance_ids': scanner_id
    }
    api_response = self.makeRequest(params)
    root = ElementTree(fromstring(api_response.content)).getroot()
    status_text = root.find('RESPONSE').find('TEXT').text
    if status_text == "Asset Group successfully added.":
        Utilities.printSuccess("Asset group added to Qualys Scanner")
        return True
    Utilities.printError("Asset group addition Failure: " + status_text)
    Utilities.printLog("Skipping remaning Qualys tasks..")
    return False
def add_asset(self, access_req):
    """Add the requested IPs to Qualys Vulnerability Management.

    Retries with interactively re-entered credentials on login failure,
    up to max_login_try_limit attempts. Returns True on success, False
    on non-login failure or when the retry limit is exceeded.
    """
    self.url = self.qualys_host + "/api/2.0/fo/asset/ip/"
    params = {'action': 'add', 'ips': access_req['ip'], 'enable_vm': '1'}
    max_login_try_limit = 2
    while True:
        # Login check done here, if it fails here then rest all task is skipped
        if (self.login_try > 0) and (self.login_try < max_login_try_limit):
            # A previous attempt failed: prompt for fresh credentials.
            self.uname = input("Please enter your username for " + " Qualys" + ": ")
            self.passwd = input("Please enter your password for " + " Qualys" + ": ")
        elif self.login_try >= max_login_try_limit:
            Utilities.printError(
                "Qualys login attemts exceded maximum limit, skipping Qualys tasks.."
            )
            return False
        response_aasset_add = self.makeRequest(params)
        responseXML = response_aasset_add.content
        tree = ElementTree(fromstring(responseXML))
        root = tree.getroot()
        asset_response = root.find('RESPONSE')
        # The API signals the outcome in the TEXT element's message.
        asset_status = asset_response.find('TEXT').text
        if asset_status == "IPs successfully added to Vulnerability Management":
            Utilities.printSuccess("Asset added to Qualys Scanner")
            return True
        elif asset_status == "Bad Login/Password":
            # Count the failure and loop back to re-prompt.
            Utilities.printError("Qualys login failed..")
            self.login_try += 1
        else:
            Utilities.printError("Asset adition Failure: " + asset_status)
            Utilities.printLog("Skipping remaning Qualys tasks..")
            return False
def from_response(cls, resp, ns) -> List["FlowItem"]:
    """Build FlowItem objects from every <flow> element in the XML
    response body *resp*, using namespace map *ns*."""
    flow_items = []
    for flow_xml in fromstring(resp).findall(".//t:flow", namespaces=ns):
        parsed = cls._parse_element(flow_xml, ns)
        (id_, name, description, webpage_url, created_at, updated_at,
         tags, project_id, project_name, owner_id) = parsed
        item = cls(project_id)
        # The 8th slot (project_id) is deliberately passed as None here;
        # it was already supplied via the constructor.
        item._set_values(id_, name, description, webpage_url, created_at,
                         updated_at, tags, None, project_name, owner_id)
        flow_items.append(item)
    return flow_items
def _parse_common_elements(self, flow_xml, ns):
    """Refresh this item's updated_at/project/owner fields from a <flow>
    element (or an XML string containing one); returns self."""
    if not isinstance(flow_xml, ET.Element):
        # Accept a raw XML string and locate the flow element inside it.
        flow_xml = fromstring(flow_xml).find(".//t:flow", namespaces=ns)
    if flow_xml is None:
        return self
    parsed = self._parse_element(flow_xml, ns)
    # Only four of the parsed fields are of interest here.
    updated_at = parsed[5]
    project_id = parsed[7]
    project_name = parsed[8]
    owner_id = parsed[9]
    self._set_values(None, None, None, None, None, updated_at, None,
                     project_id, project_name, owner_id)
    return self
def extract_mpu_parts(body: AnyStr, xmlns: str = "http://s3.amazonaws.com/doc/2006-03-01/"):
    """Extract part data from an XML-formatted CompleteMultipartUpload request.

    This function parses the request body used by this operation:
    https://docs.aws.amazon.com/AmazonS3/latest/API/API_CompleteMultipartUpload.html

    Arguments:
        body (str)
            Body of incoming request; expected to be a valid XML document.
        xmlns (str)
            Namespace used by the XML document.

    Returns: list[dict]
        A list of dicts in the format used for ``Parts`` in the boto s3
        client's ``complete_multipart_upload`` method, e.g.

            [{"PartNumber": 1, "ETag": "abc123..."},
             {"PartNumber": 2, "ETag": "xxxyyy..."}, ...]
    """
    ns = {"s3": xmlns}
    root = fromstring(body)
    etag_elements = root.findall(".//s3:ETag", ns)
    number_elements = root.findall(".//s3:PartNumber", ns)
    parts = []
    # ETag and PartNumber elements are paired up in document order.
    for etag_el, number_el in zip(etag_elements, number_elements):
        parts.append({"ETag": etag_el.text, "PartNumber": int(number_el.text)})
    return parts
def addSite(self, access_req):
    """Create a Nexpose site for the requested IPs via SiteSaveRequest.

    On success stores the new site id in self.site_id and returns True;
    on failure prints the server's exception message and returns False.
    """
    siteSaveRequest = Element('SiteSaveRequest', attrib={'session-id': self.session_id})
    # Site element have 'S' in caps !!--lost a day on this !!
    # id='-1' tells Nexpose to create a new site rather than update one.
    site_elem = SubElement(siteSaveRequest, 'Site',
                           attrib={'name': access_req['site_name'], 'id': '-1'})
    host_elem = SubElement(site_elem, 'Hosts')
    # One <range> per requested IP; empty 'to' means a single address.
    for ip in access_req['ip'].split(','):
        range_elem = SubElement(host_elem, 'range', attrib={'from': ip, 'to': ''})
    scanConfig_elem = SubElement(site_elem, 'ScanConfig',
                                 attrib={'name': 'Full audit', 'templateID': 'full-audit'})
    xmlTree = ElementTree(siteSaveRequest)
    f = BytesIO()
    # required so that xml declarations will come up in generated XML
    xmlTree.write(f, encoding='utf-8', xml_declaration=True)
    saveSiteReqXML = f.getvalue().decode("utf-8")  # converts bytes to string
    responseXML = self.makeRequest(self.reqURL, saveSiteReqXML, self.headers)
    tree = ElementTree(fromstring(responseXML))
    root = tree.getroot()
    addSiteResponse = root.get('success')
    if (addSiteResponse == "1"):
        self.site_id = root.get('site-id')
        Utilities.printSuccess("Created site with site-id: " + self.site_id)
        return True
    else:
        # Dig the human-readable message out of Failure/Exception/message.
        fa = root.find('Failure')
        ex = fa.find('Exception')
        msg = ex.find('message').text
        Utilities.printError("Site creation failed: " + msg)
        return False
def login_nexpose(self, scanner_info):
    """Log in to the Nexpose scanner (API v1.1) and store the session id.

    First attempt uses the credentials from *scanner_info*; subsequent
    attempts prompt the operator for fresh credentials, up to
    max_login_try_limit tries.

    Args:
        scanner_info: dict with 'uname' and 'passwd' keys.
    Returns:
        True on successful login (self.session_id is set), False when
        the retry limit is exceeded.
    """
    max_login_try_limit = 2
    while True:
        if self.login_try == 0:
            xmlReq = Element('LoginRequest',
                             attrib={'user-id': scanner_info['uname'],
                                     'password': scanner_info['passwd']})
        elif self.login_try > 0 and self.login_try < max_login_try_limit:
            usr_name = input("Please enter your username for " + " Nexpose" + ": ")
            usr_passwd = input("Please enter your password for " + " Nexpose" + ": ")
            xmlReq = Element('LoginRequest',
                             attrib={'user-id': usr_name,
                                     'password': usr_passwd})
        else:
            Utilities.printError("Nexpose login attemts exceded maximum limit, skipping Nexpose tasks..")
            return False
        # BUG FIX: a stray unconditional rebuild of xmlReq from
        # scanner_info used to follow here, which discarded the
        # credentials re-entered on retry; it has been removed so retries
        # actually use the prompted username/password.
        xmlTree = ElementTree(xmlReq)
        f = BytesIO()
        # required so that xml declarations will come up in generated XML
        xmlTree.write(f, encoding='utf-8', xml_declaration=True)
        loginReqXML = f.getvalue().decode("utf-8")  # converts bytes to string
        responseXML = self.makeRequest(self.reqURL, loginReqXML, self.headers)
        tree = ElementTree(fromstring(responseXML))
        root = tree.getroot()
        loginResponse = root.get('success')
        if (loginResponse == "1"):
            self.session_id = root.get('session-id')
            Utilities.printSuccess("Logged in to Nexpose Scanner")
            return True
        else:
            # Extract the server-side failure message for the operator.
            fa = root.find('Failure')
            ex = fa.find('Exception')
            msg = ex.find('message').text
            Utilities.printError("Login Failure: " + msg)
            self.login_try += 1
def addUser(self, access_req):
    """Create a Nexpose user (via UserSaveRequest) for every entry in
    access_req['userList']; each entry is "uname,name,email"."""
    usrLst = access_req['userList']
    for user in usrLst:
        usrSaveRequest = Element('UserSaveRequest', attrib={'session-id': self.session_id})
        userinfo = user.split(',')  # uname,name,email
        pswd = Utilities.gen_code()
        # id='-1' creates a new user; authsrcid='-1' uses the default source.
        usrConfig_elem = SubElement(usrSaveRequest, 'UserConfig',
                                    attrib={'id': '-1', 'role-name': 'user',
                                            'authsrcid': '-1', 'enabled': '1',
                                            'name': userinfo[0],
                                            'fullname': userinfo[1],
                                            'email': userinfo[2],
                                            'password': pswd})
        sites_elem = SubElement(usrConfig_elem, 'UserSite')
        site_elem = SubElement(sites_elem, 'site', attrib={'id': self.site_id})
        site_elem.text = access_req['site_name']
        xmlTree = ElementTree(usrSaveRequest)
        f = BytesIO()
        # required so that xml declarations will come up in generated XML
        xmlTree.write(f, encoding='utf-8', xml_declaration=True)
        usrSaveReqXML = f.getvalue().decode("utf-8")  # converts bytes to string
        responseXML = self.makeRequest(self.reqURL, usrSaveReqXML, self.headers)
        tree = ElementTree(fromstring(responseXML))
        root = tree.getroot()
        addUserReq = root.get('success')
        # NOTE(review): the success/failure branches below arrived
        # scrubbed in the source (the "******" markers replaced what were
        # presumably credential-bearing print statements); the original
        # token stream is preserved verbatim and cannot compile as-is —
        # reconstruct from VCS history before use.
        if (addUserReq == "1"):
            Utilities.printSuccess("Created user: "******"Nexpose\nUsername:"******"\nPassword:"******"User creation failed: " + msg)
            return False
def test_upload_multi_action(self):
    """Upload an osmChange containing both a <create> and a <modify>
    action in one request and check the server accepts it."""
    cs = CreateTestChangeset(self.user, tags={"foo": "me"}, is_open=True)
    # Pre-create two nodes and a way referencing them, to be modified.
    node = create_node(self.user.id, self.user.username)
    node2 = create_node(self.user.id, self.user.username, node)
    way = create_way(self.user.id, self.user.username, [node.objId, node2.objId])
    # New node uses placeholder id -3912; the modified way references the
    # placeholder plus the two existing nodes.
    xml = """<osmChange version="0.6" generator="JOSM">
<create>
  <node id='-3912' changeset='{0}' lat='50.78673385857' lon='-1.04730886255'>
    <tag k='abc' v='def' />
  </node>
</create>
<modify>
  <way id='{1}' changeset='{0}' version="{2}">
    <nd ref='-3912' />
    <nd ref='{3}' />
    <nd ref='{4}' />
    <tag k='ghi' v='jkl' />
  </way>
</modify>
</osmChange>""".format(cs.objId, way.objId, way.metaData.version, node.objId, node2.objId)
    response = self.client.post(reverse('changeset:upload', args=(cs.objId,)), xml,
                                content_type='text/xml')
    if response.status_code != 200:
        # Dump the body to aid debugging on failure.
        print (response.content)
    self.assertEqual(response.status_code, 200)
    xml = fromstring(response.content)
    diffDict = ParseOsmDiffToDict(xml)
def test_fail_and_skip(self, mock_jobsubmit, mock_getresultstree, mock_warning, mock_error):
    """ Ensure that a job with failed tasks, no waiving and skipped tests returns SKT_FAIL."""
    # pylint: disable=unused-argument
    job_xml = misc.get_asset_content('beaker_skip_and_fail.xml')
    mock_jobsubmit.return_value = "J:0001"
    mock_getresultstree.return_value = fromstring(job_xml)
    # Shrink the watch delay so the test doesn't wait 60 seconds per
    # iteration; beaker_pass_results.xml only needs one loop anyway.
    self.myrunner.watchdelay = 0.1
    # The asset file already carries the 'Completed' state, so there is
    # no need to flip the fake Beaker job's state for this test.
    outcome = misc.exec_on(self.myrunner, mock_jobsubmit,
                           'beaker_skip_and_fail.xml', 5, waiving=False)
    # With waiving disabled, failed + skipped tasks must yield SKT_FAIL.
    self.assertEqual(SKT_FAIL, outcome)
def form_valid(self, form):
    """Parse the submitted tournament XML, run the importer, flash a
    success message and delegate to the parent handler."""
    tournament_xml = fromstring(form.cleaned_data['xml'])
    self.importer = Importer(tournament_xml)
    self.importer.import_tournament()
    messages.success(self.request, _("Tournament archive has been imported."))
    return super().form_valid(form)
def test_upload_delete_single_node(self):
    """Upload an osmChange deleting one node; the diff must echo the old
    id and the node must be gone from the database."""
    cs = CreateTestChangeset(self.user, tags={"foo": "interstellar"}, is_open=True)
    node = create_node(self.user.id, self.user.username)
    xml = """<osmChange generator="JOSM" version="0.6">
<delete>
  <node changeset="{}" id="{}" lat="50.80" lon="-1.05" version="{}"/>
</delete>
</osmChange>""".format(cs.objId, node.objId, node.metaData.version)
    response = self.client.post(reverse('changeset:upload', args=(cs.objId, )), xml,
                                content_type='text/xml')
    if response.status_code != 200:
        # Dump the body to aid debugging on failure.
        print(response.content)
    self.assertEqual(response.status_code, 200)
    xml = fromstring(response.content)
    # Exactly one diff entry, referencing the deleted node's old id.
    self.assertEqual(len(xml), 1)
    ndiff = xml[0]
    self.assertEqual(int(ndiff.attrib["old_id"]), node.objId)
    # The node must no longer be retrievable.
    dbNode = GetObj(p, "node", node.objId)
    self.assertEqual(dbNode is None, True)
def test_service_provider_certificate():
    """Check that the SAML service-provider metadata certificate matches
    the host certificate on disk.

    Raises Critical when the X509Certificate embedded in any matching
    serviceProviderMetadata LDAP entry is not found in
    /etc/univention/ssl/<fqdn>/cert.pem.
    """
    # compare /etc/univention/ssl/$(hostname -f)/cert.pem with
    # univention-ldapsearch -LLL "(&(serviceProviderMetadata=*)(univentionObjectType=saml/serviceprovider)(SAMLServiceProviderIdentifier=https://$(hostname -f)/univention/saml/metadata))" serviceProviderMetadata | ldapsearch-wrapper | ldapsearch-decode64
    # If it fails: /usr/share/univention-management-console/saml/update_metadata
    #
    # fails because https://help.univention.com/t/renewing-the-ssl-certificates/37 was not used. https://help.univention.com/t/renewing-the-complete-ssl-certificate-chain/36
    lo = univention.uldap.getMachineConnection()
    certs = lo.search(filter_format(
        '(&(serviceProviderMetadata=*)(univentionObjectType=saml/serviceprovider)(SAMLServiceProviderIdentifier=https://%s/univention/saml/metadata))',
        ['%s.%s' % (ucr.get('hostname'), ucr.get('domainname'))]),
        attr=['serviceProviderMetadata'])
    MODULE.process(
        "Checking certificates of /etc/univention/ssl/%s.%s/cert.pem" %
        (ucr.get('hostname'), ucr.get('domainname')))
    with open('/etc/univention/ssl/%s.%s/cert.pem' %
              (ucr.get('hostname'), ucr.get('domainname'))) as fd:
        # BUG FIX: fd.read() was previously called inside the loop, so the
        # file was exhausted after the first entry and every subsequent
        # certificate falsely failed the containment check. Read once.
        cert_pem = fd.read()
    for cert in certs:
        cert = find_node(
            fromstring(
                cert[1]['serviceProviderMetadata'][0].decode('UTF-8')),
            '{http://www.w3.org/2000/09/xmldsig#}X509Certificate')
        if cert.text.strip() not in cert_pem:
            MODULE.error(
                'The certificate of the SAML service provider does not match.'
            )
            raise Critical(
                _('The certificate of the SAML service provider does not match.'
                  ))
def set_show_id(self):
    """Resolve and store the show id and canonical show name.

    Reads the cached show-id XML when caching is enabled and the file
    exists; otherwise fetches it via request_show_id(). Raises
    errors.InvalidXMLException when the XML cannot be parsed or is empty.
    """
    self.log.debug('Retrieving series id for %s', self.show)
    cache = os.path.join(self.get_cache_dir(self.show), 'show_id')
    try:
        if not self.cache:
            # Caching disabled: force the network/request path below.
            raise IOError
        with open(cache, 'r') as f:
            xml = f.read()
    except IOError:
        xml = self.request_show_id(cache)
    try:
        xml = xml.encode('utf-8')  # deal with py2 faff
    except UnicodeEncodeError:
        pass
    self.log.debug('XML: Attempting to parse')
    try:
        tree = fromstring(xml)
    except ParseError:
        raise errors.InvalidXMLException(self.show)
    # BUG FIX: this previously used `len(tree) is 0`, which compares
    # identity with an int literal (implementation-dependent, and a
    # SyntaxWarning on modern CPython); use an equality check instead.
    if tree is None or len(tree) == 0:
        raise errors.InvalidXMLException(self.show)
    self.log.debug('XML: Parsed')
    self.show_id, self.show = self.get_show_id_from_xml(tree)
    self.log.debug('Retrieved show id: %s', self.show_id)
    self.log.debug('Retrieved canonical show name: %s', self.show)
def test_upload_create_node_way_version_one(self):
    """Upload a create-only osmChange (two nodes + one way, all with
    explicit version="1") and check the diff maps every placeholder id
    to new version 1."""
    cs = CreateTestChangeset(self.user, tags={"foo": "me"}, is_open=True)
    # Placeholder ids -1/-2 for the nodes, -3 for the way that references them.
    xml = """<osmChange version="0.6" generator="acme osm editor">
<create>
  <node id="-1" changeset="{0}" version="1" lat="-33.9133123" lon="151.1173123" />
  <node id="-2" changeset="{0}" version="1" lat="-33.9233321" lon="151.1173321" />
  <way id="-3" changeset="{0}" version="1">
    <nd ref="-1"/>
    <nd ref="-2"/>
  </way>
</create>
</osmChange>""".format(cs.objId)
    response = self.client.post(reverse('changeset:upload', args=(cs.objId,)), xml,
                                content_type='text/xml')
    if response.status_code != 200:
        # Dump the body to aid debugging on failure.
        print (response.content)
    self.assertEqual(response.status_code, 200)
    xml = fromstring(response.content)
    # One diff entry per uploaded object.
    self.assertEqual(len(xml), 3)
    diffDict = ParseOsmDiffToDict(xml)
    self.assertEqual(-1 in diffDict["node"], True)
    self.assertEqual(-2 in diffDict["node"], True)
    self.assertEqual(-3 in diffDict["way"], True)
    # Every newly created object starts at version 1.
    self.assertEqual(diffDict["node"][-1][1], 1)
    self.assertEqual(diffDict["node"][-2][1], 1)
    self.assertEqual(diffDict["way"][-3][1], 1)
def from_response(cls, resp, ns) -> List["GroupItem"]:
    """Build GroupItem objects from every <group> element in the XML
    response body *resp*, using namespace map *ns*."""
    groups = []
    for group_xml in fromstring(resp).findall(".//t:group", namespaces=ns):
        item = cls(group_xml.get("name", None))
        item._id = group_xml.get("id", None)
        # Domain name is returned in a domain element for some calls
        domain_elem = group_xml.find(".//t:domain", namespaces=ns)
        if domain_elem is not None:
            item.domain_name = domain_elem.get("name", None)
        # Import element is returned for both local and AD groups (2020.3+)
        import_elem = group_xml.find(".//t:import", namespaces=ns)
        if import_elem is not None:
            item.domain_name = import_elem.get("domainName", None)
            item.license_mode = import_elem.get("grantLicenseMode", None)
            item.minimum_site_role = import_elem.get("siteRole", None)
        groups.append(item)
    return groups
def get_nbrb_rates(for_date):
    # returns: {char_code: rate} for specified date
    url = 'http://www.nbrb.by/Services/XmlExRates.aspx'
    ondate = for_date.strftime('%m/%d/%Y')
    response = requests.get(url, params={'ondate': ondate})
    # utf-8-sig strips a possible BOM from the feed.
    text = response.content.decode('utf-8-sig')
    # TODO: check for xml attacks: https://docs.python.org/3/library/xml.html, https://pypi.python.org/pypi/defusedxml/
    root = fromstring(text)
    result = {}
    if root.tag != 'DailyExRates' or root.attrib['Date'] != ondate:
        # Unexpected document: return the empty mapping.
        return result
    if len(root) == 0:
        # Preserved behaviour: an empty feed yields None, not {}.
        warning('empty DailyExRates for date %s' % ondate)
        return
    for currency_node in root:
        if currency_node.tag != 'Currency':
            warning('%s is not currency node' % currency_node)
            continue
        char_code = currency_node.find('CharCode').text
        result[char_code] = float(currency_node.find('Rate').text)
    return result
def add_user(self, access_req):
    """Create a Qualys scanner user for every entry in access_req['userList'].

    Each entry is "uname,name,email". Returns False on the first failed
    creation, True once all users have been created.

    BUG FIX: the original returned True inside the loop immediately
    after the first successful user, so only one user was ever created;
    the loop now processes every entry.
    """
    self.url = self.qualys_host + "/msp/user.php"
    usrLst = access_req['userList']
    for user in usrLst:
        userinfo = user.split(',')  # uname,name,email
        # SECURITY NOTE(review): password is derived predictably from the
        # username; consider a random generator (e.g. secrets) instead.
        # NOTE(review): pswd is computed but never sent in `parms` —
        # verify whether Qualys emails auto-generated credentials.
        pswd = userinfo[0] + '!vul5c4p1'
        parms = {'action': 'add',
                 'user_role': 'scanner',
                 'business_unit': 'Unassigned',
                 'asset_groups': access_req['site_name'],
                 'first_name': userinfo[1].split(' ')[0],
                 'last_name': userinfo[1].split(' ')[1],
                 'title': 'Scanner User',
                 'phone': '0000000000',
                 'email': userinfo[2],
                 'address1': '3401 Hillview Ave',
                 'city': 'Palo Alto',
                 'country': 'United States of America',
                 'state': 'California',
                 'zip_code': '94304',
                 'send_email': '1'}
        response_user_add = self.makeRequest(parms)
        responseXML = response_user_add.content
        tree = ElementTree(fromstring(responseXML))
        root = tree.getroot()
        asset_response = root.find('RETURN')
        user_add_status = asset_response.get('status')
        user_add_status_msg = asset_response.find('MESSAGE').text
        if user_add_status == "SUCCESS":
            Utilities.printSuccess(user_add_status_msg + " for " + userinfo[1])
        else:
            Utilities.printError("User addition Failure: " + user_add_status_msg)
            return False
    return True
def apply_logicalnames(self, site_design, state_manager):
    """Gets the logicalnames for devices from lshw.

    :param site_design: SiteDesign object.
    :param state_manager: DrydockState object.
    :return: Returns sets a dictionary of aliases that map to logicalnames
             in self.logicalnames.
    """
    results = state_manager.get_build_data(node_name=self.get_name(), latest=True)
    # Pick out the lshw-generated build data, if any.
    lshw_data = None
    for build_result in results:
        if build_result.generator == "lshw":
            lshw_data = build_result.data_element
            break
    names = {}
    if lshw_data:
        root = fromstring(lshw_data)
        for profile in site_design.hardware_profiles:
            for device in profile.devices:
                names[device.alias] = self._apply_logicalname(
                    root, device.alias, device.bus_type, device.address)
    else:
        self.logger.info("No Build Data found for node_name %s" % (self.get_name()))
    self.logicalnames = names
def test_upload_create_single_node(self):
    """Upload an osmChange creating one node and verify the diff entry
    and the stored node's ownership metadata."""
    cs = CreateTestChangeset(self.user, tags={"foo": "invade"}, is_open=True)
    # Placeholder id -5393 to be replaced with a real id by the server.
    xml = """<osmChange generator="JOSM" version="0.6">
<create>
  <node changeset="{}" id="-5393" lat="50.79046578105" lon="-1.04971367626" />
</create>
</osmChange>""".format(cs.objId)
    response = self.client.post(reverse('changeset:upload', args=(cs.objId, )), xml,
                                content_type='text/xml')
    if response.status_code != 200:
        # Dump the body to aid debugging on failure.
        print(response.content)
    self.assertEqual(response.status_code, 200)
    xml = fromstring(response.content)
    # One diff entry: placeholder id mapped to a positive new id, version 1.
    self.assertEqual(len(xml), 1)
    ndiff = xml[0]
    self.assertEqual(int(ndiff.attrib["old_id"]), -5393)
    self.assertEqual(int(ndiff.attrib["new_version"]), 1)
    self.assertEqual(int(ndiff.attrib["new_id"]) > 0, True)
    # The stored node carries the uploading user's metadata and a
    # recent timestamp.
    dbNode = GetObj(p, "node", int(ndiff.attrib["new_id"]))
    self.assertEqual(dbNode is not None, True)
    self.assertEqual(dbNode.metaData.username, self.user.username)
    self.assertEqual(dbNode.metaData.uid, self.user.id)
    self.assertEqual(
        abs(dbNode.metaData.timestamp - time.time()) < 60, True)
def add_asset(self, access_req):
    """Add the requested IPs to Qualys Vulnerability Management.

    Retries with interactively re-entered credentials on login failure,
    up to max_login_try_limit attempts. Returns True on success, False
    on non-login failure or when the retry limit is exceeded.
    """
    self.url = self.qualys_host + "/api/2.0/fo/asset/ip/"
    params = {'action': 'add', 'ips': access_req['ip'], 'enable_vm': '1'}
    max_login_try_limit = 2
    while True:
        # Login check done here, if it fails here then rest all task is skipped
        if (self.login_try > 0) and (self.login_try < max_login_try_limit):
            # A previous attempt failed: prompt for fresh credentials.
            self.uname = input("Please enter your username for " + " Qualys" + ": ")
            self.passwd = input("Please enter your password for " + " Qualys" + ": ")
        elif self.login_try >= max_login_try_limit:
            Utilities.printError("Qualys login attemts exceded maximum limit, skipping Qualys tasks..")
            return False
        response_aasset_add = self.makeRequest(params)
        responseXML = response_aasset_add.content
        tree = ElementTree(fromstring(responseXML))
        root = tree.getroot()
        asset_response = root.find('RESPONSE')
        # The API signals the outcome in the TEXT element's message.
        asset_status = asset_response.find('TEXT').text
        if asset_status == "IPs successfully added to Vulnerability Management":
            Utilities.printSuccess("Asset added to Qualys Scanner")
            return True
        elif asset_status == "Bad Login/Password":
            # Count the failure and loop back to re-prompt.
            Utilities.printError("Qualys login failed..")
            self.login_try += 1
        else:
            Utilities.printError("Asset adition Failure: " + asset_status)
            Utilities.printLog("Skipping remaning Qualys tasks..")
            return False
def from_response(cls, resp, ns=None) -> List["PermissionsRule"]:
    """Build PermissionsRule objects from every <granteeCapabilities>
    element in the XML response body *resp*.

    Raises UnpopulatedPropertyError when a <capability> element is
    missing its name or mode attribute.
    """
    parsed_response = fromstring(resp)
    rules = []
    permissions_rules_list_xml = parsed_response.findall(
        ".//t:granteeCapabilities", namespaces=ns)
    for grantee_capability_xml in permissions_rules_list_xml:
        capability_dict: Dict[str, str] = {}
        grantee = PermissionsRule._parse_grantee_element(
            grantee_capability_xml, ns)
        for capability_xml in grantee_capability_xml.findall(
                ".//t:capabilities/t:capability", namespaces=ns):
            name = capability_xml.get("name")
            mode = capability_xml.get("mode")
            if name is None or mode is None:
                # BUG FIX: the element was previously passed as an extra
                # positional argument with no %s placeholder, so it never
                # appeared in the logged message.
                logger.error("Capability was not valid: %s", capability_xml)
                raise UnpopulatedPropertyError()
            else:
                capability_dict[name] = mode
        rule = PermissionsRule(grantee, capability_dict)
        rules.append(rule)
    return rules
def test_expand_bbox(self):
    """POST to the expand_bbox endpoint and verify the changeset's
    bounding box both in the database and in the XML response."""
    cs = CreateTestChangeset(self.user)
    response = self.client.post(reverse('changeset:expand_bbox', args=(cs.objId, )),
                                self.expandBboxXml, content_type='text/xml')
    self.assertEqual(response.status_code, 200)
    # Verify directly against the database via a read transaction.
    t = p.GetTransaction("ACCESS SHARE")
    cs2 = pgmap.PgChangeset()
    errStr = pgmap.PgMapError()
    t.GetChangeset(cs.objId, cs2, errStr)
    self.assertEqual(cs2.bbox_set, True)
    # Expected bounds from self.expandBboxXml, compared with tolerance.
    self.assertEqual(abs(cs2.y1 - 50.2964626834) < 1e-5, True)
    self.assertEqual(abs(cs2.y2 - 51.7985258134) < 1e-5, True)
    self.assertEqual(abs(cs2.x1 + 5.24880409375) < 1e-5, True)
    self.assertEqual(abs(cs2.x2 + 3.08999061719) < 1e-5, True)
    # The XML response must advertise the same bounds.
    xml = fromstring(response.content)
    self.assertEqual(xml.tag, "osm")
    csout = xml.find("changeset")
    self.assertEqual(int(csout.attrib["id"]) == cs.objId, True)
    self.assertEqual(
        abs(float(csout.attrib["min_lat"]) - 50.2964626834) < 1e-5, True)
    self.assertEqual(
        abs(float(csout.attrib["max_lat"]) - 51.7985258134) < 1e-5, True)
    self.assertEqual(
        abs(float(csout.attrib["min_lon"]) + 5.24880409375) < 1e-5, True)
    self.assertEqual(
        abs(float(csout.attrib["max_lon"]) + 3.08999061719) < 1e-5, True)
    t.Commit()
def test_get_changeset(self):
    """Fetch a single changeset anonymously and verify its attributes,
    both tags (including a non-ASCII value), and absent discussion."""
    teststr = u"Съешь же ещё этих мягких французских булок да выпей чаю"
    cs = CreateTestChangeset(self.user, tags={"foo": "bar", 'test': teststr},
                             bbox=(-1.0893202, 50.7942715, -1.0803509, 50.7989372))
    anonClient = Client()
    response = anonClient.get(reverse('changeset:changeset', args=(cs.objId,)))
    self.assertEqual(response.status_code, 200)
    xml = fromstring(response.content)
    self.assertEqual(xml.tag, "osm")
    csout = xml.find("changeset")
    self.assertEqual(int(csout.attrib["id"]) == cs.objId, True)
    # Standard changeset attributes must all be present.
    self.assertEqual("uid" in csout.attrib, True)
    self.assertEqual("created_at" in csout.attrib, True)
    self.assertEqual("min_lon" in csout.attrib, True)
    self.assertEqual("max_lon" in csout.attrib, True)
    self.assertEqual("min_lat" in csout.attrib, True)
    self.assertEqual("max_lat" in csout.attrib, True)
    self.assertEqual(csout.attrib["open"], "true")
    # Both tags must round-trip, including the unicode value.
    self.assertEqual(len(csout.findall("tag")), 2)
    foundFirst, foundSecond = False, False
    for tag in csout.findall("tag"):
        if tag.attrib["k"] == "foo":
            self.assertEqual(tag.attrib["v"], "bar")
            foundFirst = True
        if tag.attrib["k"] == "test":
            self.assertEqual(tag.attrib["v"], teststr)
            foundSecond = True
    self.assertEqual(foundFirst, True)
    self.assertEqual(foundSecond, True)
    # No discussion element is expected for this changeset.
    self.assertEqual(csout.find("discussion"), None)
def set_show_id(self):
    """Resolve and store the show id and canonical show name.

    Reads the cached show-id XML when caching is enabled and the file
    exists; otherwise fetches it via request_show_id(). Raises
    errors.InvalidXMLException when the XML cannot be parsed or is empty.
    """
    self.log.debug('Retrieving series id for %s', self.show)
    cache = os.path.join(self.get_cache_dir(self.show), 'show_id')
    try:
        if not self.cache:
            # Caching disabled: force the request path below.
            raise IOError
        with open(cache, 'r') as f:
            # NOTE(review): .decode('utf8') only works on py2 byte
            # strings; on py3 str has no decode and this raises
            # AttributeError (not caught here) — confirm py2-only usage.
            xml = f.read().decode('utf8')
    except IOError:
        xml = self.request_show_id(cache)
    try:
        xml = xml.encode('utf-8')  # deal with py2 faff
    except UnicodeEncodeError:
        pass
    try:
        tree = fromstring(xml)
    except ParseError:
        raise errors.InvalidXMLException(self.show)
    # BUG FIX: this previously used `len(tree) is 0`, which compares
    # identity with an int literal (implementation-dependent, and a
    # SyntaxWarning on modern CPython); use an equality check instead.
    if tree is None or len(tree) == 0:
        raise errors.InvalidXMLException(self.show)
    self.log.debug('XML: Parsed 1')
    self.show_id, self.show = self.get_show_id_from_xml(tree)
    self.log.debug('Retrieved show id: %s', self.show_id)
    self.log.debug('Retrieved canonical show name: %s', self.show)
def from_response(cls, xml, ns) -> List["JobItem"]:
    """Parse a server response document into a list of JobItem objects.

    Args:
        xml: raw XML response text/bytes.
        ns: namespace mapping used to resolve the ``t:`` prefix.

    Returns:
        One JobItem per ``<t:job>`` element found in the document.
    """
    root = fromstring(xml)
    job_elements = root.findall(".//t:job", namespaces=ns)
    return [JobItem._parse_element(element, ns) for element in job_elements]
def __add_file_to_index(self, fnmtime, filename, ctype="news"): """ Reads in a file, processes it into lines, ElementTree grabs text out of the tags, processes the input to remove banal words and symbols, and then adds it to the index. """ # Enable writing to our chosen index. To limit the index # locking, this is the only function that writes to the index. writer = self.index.writer() card_root = GlobalConfig.get("paths", "data_root") + "/private" card_path = card_root + "/" + self.config.get("paths", ctype) with open(card_path + "/" + filename, 'r', encoding='utf-8') as indexfh: body = "" lines = indexfh.read().splitlines() unrolled = unroll_newlines(lines) body += unrolled[0] body += unrolled[1] for line in unrolled: if line.find('<p') == 0: e = fromstring(escape_amp(line)) for t in e.itertext(): body += t + " " self.__process_input(body, returning="contents") # Update wraps add if the document hasn't been inserted, and # replaces current indexed data if it has been inserted. This # requires the file parameter to be set "unique" in the Schema writer.update_document(file=filename, ctype=ctype, mtime=str(fnmtime), content=self.content) # Finish by commiting the updates writer.commit()
def test_publish_multi_connection(self) -> None:
    """Verify every connection is serialized into the publish request XML."""
    datasource = TSC.DatasourceItem(
        name="Sample", project_id="ee8c6e70-43b6-11e6-af4f-f7b0d8e20760")

    connections = []
    for address in ("mysql.test.com", "pgsql.test.com"):
        conn = TSC.ConnectionItem()
        conn.server_address = address
        conn.connection_credentials = TSC.ConnectionCredentials(
            "test", "secret", True)
        connections.append(conn)

    response = RequestFactory.Datasource._generate_xml(
        datasource, connections=connections)

    # Can't use ConnectionItem parser due to xml namespace problems
    parsed = fromstring(response).findall(".//connection")
    self.assertEqual(parsed[0].get("serverAddress", None), "mysql.test.com")
    self.assertEqual(
        parsed[0].find("connectionCredentials").get("name", None),
        "test")  # type: ignore[union-attr]
    self.assertEqual(parsed[1].get("serverAddress", None), "pgsql.test.com")
    self.assertEqual(
        parsed[1].find("connectionCredentials").get("password", None),
        "secret")  # type: ignore[union-attr]
def _latest_version(self):
    """Return the latest version string scraped from the badge SVG.

    Falls back to '0.0.0' whenever the badge cannot be fetched, so callers
    always receive a comparable version string.
    """
    try:
        # timeout so a hung server cannot block the caller indefinitely
        resp = requests.get(self.badge_url, timeout=10)
    except requests.exceptions.RequestException:
        # BUG FIX: only ConnectionError was handled before, so timeouts and
        # other transport errors escaped the '0.0.0' fallback.
        return '0.0.0'
    element_tree = fromstring(resp.text)
    # badge SVG layout: second <g> group, third <text> node holds the version
    return element_tree.findall(
        '{ns}g'.format(ns=self.name_space))[1].findall(
            '{ns}text'.format(ns=self.name_space))[2].text
def query_institution(name=''):
    """Look up an institution by name and return its connection details.

    Returns:
        Dict with 'fid', 'fiorg' and 'url', plus 'brokerid' when the
        institution reports one.
    """
    inst_id = query_id(str(name))
    r = query('lookup', str(inst_id))
    et = fromstring(str(r.content))
    # Build the common fields once instead of duplicating the dict literal
    # in both branches.
    info = {'fid': et.find('fid').text,
            'fiorg': et.find('org').text,
            'url': et.find('url').text}
    # FIX: compare Elements against None with `is not None` — truthiness /
    # equality checks on Element objects are unreliable and deprecated.
    broker = et.find('brokerid')
    if broker is not None:
        info['brokerid'] = broker.text
    return info
def test_presentation(self):
    """Make sure the data can be found in the Excel spreadsheet."""
    buffer = StringIO()
    headers = ['A', 'B']
    rows = [
        {'A': 1, 'B': 'b'},
        {'A': 2, 'B': 'bb'},
    ]
    self.xlsx.write_presentation(headers, rows, buffer)

    # crack open the generated workbook and pull every shared-text node
    sheet_xml = ZipFile(buffer).open('xl/worksheets/sheet1.xml').read()
    text_nodes = fromstring(sheet_xml).findall('.//ssml:t', self.ns)
    self.assertEqual(['A', 'B', '1', 'b', '2', 'bb'],
                     [node.text for node in text_nodes])
def get_chat_status(name):
    """
    Get the chat status for a location by name.

    Args:
        name: string, the name of the chat widget you wish to
        retrieve. Possible values include: uofc-ask, law,
        crerar, and ssa.

    Returns:
        boolean
    """
    try:
        # CONSISTENCY FIX: the sibling helper get_chat_status_and_css guards
        # this same endpoint with a timeout and an 'unavailable' fallback;
        # without it a slow presence service hangs page rendering.
        xml = requests.get('https://us.libraryh3lp.com/presence/jid/'
                           + name + '/chat.libraryh3lp.com/xml', timeout=12)
        tree = fromstring(xml.content)
    except requests.exceptions.Timeout:
        fallback = ("<presence user='******' server='chat.libraryh3lp.com'>"
                    "<resource show='unavailable' name='libraryh3lp'"
                    " priority='5'/></presence>")
        tree = fromstring(fallback)
    return tree.find('resource').attrib['show'] == 'available'
def find_in_rss(self, path):
    """Fetch this object's feed and return the elements matching *path*.

    Fails the calling test when nothing matches.
    """
    url = self.get_url()
    app = self._get_test_app()
    response = app.get(url, status=[200])
    root = fromstring(response.body)
    feed_namespaces = {
        'xmlns': 'http://www.w3.org/2005/Atom',
        'georss': 'http://www.georss.org/georss',
    }
    found = root.find(path, namespaces=feed_namespaces)
    assert_true(
        found is not None,
        "Couldn't find elements matching path {0} in feed".format(path))
    return found
def get_scanners(self):
    """Return the id of the first scanner appliance, or None on failure.

    Queries the Qualys appliance list endpoint and extracts the first
    appliance's ID; logs an error and returns None when the API reports
    a failure.
    """
    self.url = self.qualys_host + "/api/2.0/fo/appliance/"
    parms = {'action': 'list'}
    response_get_scanners = self.makeRequest(parms)
    responseXML = response_get_scanners.content
    root = ElementTree(fromstring(responseXML)).getroot()
    # BUG FIX: appliance_id was only assigned inside the nested branches,
    # so an unexpected document shape raised UnboundLocalError at `return`.
    appliance_id = None
    response = root.find('RESPONSE')
    if response is not None:
        appliance_list = response.find('APPLIANCE_LIST')
        if appliance_list is not None:
            appliance = appliance_list.findall('APPLIANCE')
            # we take only the first appliance, coz no multiple appliance nw.
            appliance_id = appliance[0].find('ID').text
        if response.find('TEXT') is not None:
            # Error condition reported by the API
            Utilities.printError("Failure to get the scanner list: "
                                 + response.find('TEXT').text)
            appliance_id = None
    return appliance_id
def put_import_task_status(self, task_id=None, status=None, volume_id=None,
                           bytes_converted=None, error_code=None, message=None):
    """Report import-task progress and return whether the task is still live.

    Args:
        task_id: import task identifier (required).
        status: task status string (required).
        volume_id, bytes_converted, error_code, message: optional extras
            forwarded to the service when not None.

    Returns:
        True while the service has NOT cancelled the task, False otherwise.

    Raises:
        RuntimeError: when task_id or status is missing.
        httplib.HTTPException: on a non-200 response.
    """
    if task_id is None or status is None:
        raise RuntimeError("Invalid parameters")

    params = {'InstanceId': config.get_worker_id(),
              'ImportTaskId': task_id,
              'Status': status}
    if bytes_converted is not None:
        params['BytesConverted'] = bytes_converted
    if volume_id is not None:
        params['VolumeId'] = volume_id
    if error_code is not None:
        params['ErrorCode'] = error_code
    if message is not None:
        params['Message'] = message

    resp = self.conn.make_request('PutInstanceImportTaskStatus', params,
                                  path='/', verb='POST')
    if resp.status != 200:
        raise httplib.HTTPException(resp.status, resp.reason, resp.read())

    root = fromstring(resp.read())
    # BUG FIX: Element.getchildren() was removed in Python 3.9 — use list().
    # BUG FIX: the old fallback assigned the *string* 'true' and then read
    # `.text` from it, raising AttributeError whenever the response did not
    # contain exactly one child element.
    children = list(root)
    cancelled_text = children[0].text if len(children) == 1 else 'true'
    return 'true' != cancelled_text
def logoutOperation(self):
    """Send a LogoutRequest for the current session and report the outcome."""
    request_root = Element('LogoutRequest',
                           attrib={'session-id': self.session_id})
    buffer = BytesIO()
    # xml_declaration=True is required so that xml declarations will come
    # up in the generated XML
    ElementTree(request_root).write(buffer, encoding='utf-8',
                                    xml_declaration=True)
    logout_request_xml = buffer.getvalue().decode("utf-8")

    response_xml = self.makeRequest(self.reqURL, logout_request_xml,
                                    self.headers)
    root = ElementTree(fromstring(response_xml)).getroot()

    if root.get('success') == "1":
        self.session_id = root.get('session-id')
        Utilities.printSuccess("Logged out of Nexpose Scanner")
    else:
        failure = root.find('Failure')
        exception = failure.find('Exception')
        Utilities.printError("Logout Failure: "
                             + exception.find('message').text)
def valid_response(responseContent):
    """Validate that responseContent is an allowed XML or JSON payload.

    Returns the content unchanged when it passes validation, else None.
    """
    #Proxy should only be used when expecting an XML or JSON response
    #ArcGIS Server GetFeatureInfo xml response
    if re.match("<FeatureInfoResponse", responseContent):
        return responseContent
    # BUG FIX: responseContent[0] raised IndexError on an empty body;
    # startswith() is safe for any length.
    if responseContent.startswith("<"):
        try:
            # defusedxml guards against entity-expansion attacks
            from defusedxml.ElementTree import fromstring
            et = fromstring(responseContent)
            if re.match(_valid_tags, et.tag):
                return responseContent
        except ParseError:
            return None
    elif re.match(r'\[|\{', responseContent):
        try:
            json.loads(responseContent)
            return responseContent
        except ValueError:
            # narrowed from a bare except: json.loads raises ValueError
            # (JSONDecodeError) on malformed input
            return None
    return None
def valid_response(responseContent):
    """Validate that responseContent is an allowed XML or JSON payload.

    Returns the content unchanged when it passes validation, else None.
    """
    #Proxy should only be used when expecting an XML or JSON response
    #ArcGIS Server GetFeatureInfo xml response
    if re.match("<FeatureInfoResponse", responseContent):
        return responseContent
    # BUG FIX: responseContent[0] raised IndexError on an empty body;
    # startswith() is safe for any length.
    if responseContent.startswith("<"):
        try:
            # defusedxml guards against entity-expansion attacks
            from defusedxml.ElementTree import fromstring
            et = fromstring(responseContent)
            if re.match(r"\{http\:\/\/www\.opengis\.net\/wms\}WMS_Capabilities|WMT_MS_Capabilities|WMS_DescribeLayerResponse|\{http\:\/\/www\.opengis\.net\/gml\}FeatureCollection|msGMLOutput", et.tag):
                return responseContent
        except ParseError:
            return None
    elif re.match(r'\[|\{', responseContent):
        try:
            json.loads(responseContent)
            return responseContent
        except ValueError:
            # narrowed from a bare except: json.loads raises ValueError
            # (JSONDecodeError) on malformed input
            return None
    return None
def get_chat_status_and_css(name):
    """
    Get the chat status and css for Ask a Librarian pages.

    Args:
        name: string, the name of the chat widget you wish to
        retrieve. Possible values include: uofc-ask, law,
        crerar, and ssa.

    Returns:
        Tuple representing the chat status for Ask a Librarian
        pages where the first item is a boolean and the second
        item is a string (css class).
    """
    try:
        # CONSISTENCY FIX: the other copy of this helper wraps the request
        # with a timeout and an 'unavailable' fallback so a slow presence
        # service can't hang page rendering; mirror that behaviour here.
        xml = requests.get('https://us.libraryh3lp.com/presence/jid/'
                           + name + '/chat.libraryh3lp.com/xml', timeout=12)
        tree = fromstring(xml.content)
    except requests.exceptions.Timeout:
        fallback = ("<presence user='******' server='chat.libraryh3lp.com'>"
                    "<resource show='unavailable' name='libraryh3lp'"
                    " priority='5'/></presence>")
        tree = fromstring(fallback)
    status_lookup = {True: 'active', False: 'off'}
    status_bool = tree.find('resource').attrib['show'] == 'available'
    return (status_bool, status_lookup[status_bool])
def add_asset_grp(self, access_req):
    """Create a Qualys asset group for the requested site.

    Returns True on success, False when no scanner is available or the
    API reports a failure.
    """
    scanner_id = self.get_scanners()
    self.url = self.qualys_host + "/api/2.0/fo/asset/group/"
    # Guard clause: without a scanner appliance there is nothing to attach
    # the asset group to.
    if scanner_id is None:
        Utilities.printError("Asset Group adition Failure: Scanner id not found")
        return False

    params = {
        'action': 'add',
        'ips': access_req['ip'],
        'title': access_req['site_name'],
        'appliance_ids': scanner_id,
    }
    response = self.makeRequest(params)
    root = ElementTree(fromstring(response.content)).getroot()
    status_text = root.find('RESPONSE').find('TEXT').text

    if status_text == "Asset Group successfully added.":
        Utilities.printSuccess("Asset group added to Qualys Scanner")
        return True

    Utilities.printError("Asset group addition Failure: " + status_text)
    Utilities.printLog("Skipping remaning Qualys tasks..")
    return False
def set_episode_title(self, url):
    """Fetch the episode document at *url* and store its title on self.

    Raises:
        errors.EpisodeNotFoundException: when the HTTP request fails.
        errors.InvalidXMLException: when the response is not parseable XML.
    """
    self.log.debug('Episode URL: %s', url)
    self.log.debug('Attempting to retrieve episode title')

    req = requests.get(url)
    if not req.ok:
        raise errors.EpisodeNotFoundException(self.show, self.season,
                                              self.episode)

    self.log.debug('XML: Retreived')
    self.log.debug('XML: Attempting to parse 2')
    try:
        tree = fromstring(req.content)
    except ParseError:
        raise errors.InvalidXMLException(self.show)
    if tree is None:
        raise errors.InvalidXMLException(self.show)

    episode_info = (self.show, self.season, str(self.episode).zfill(2))
    self.log.debug('XML: Episode document retrived for %s - %s%s',
                   *episode_info)
    self.log.debug('XML: Attempting to find the episode title')
    self.title = self.get_episode_title_from_xml(tree)
    self.log.debug('Retrieved episode title: %s', self.title)
def query_id(name=''):
    """Search for an institution by name and return its id as a string."""
    response = query('search', str(name))
    document = fromstring(str(response.content))
    return str(document.find('institutionid').get('id'))