def check_amazons3be(c, i, search, keyword, config):
    """Parse an exposed Amazon S3 bucket listing and record it.

    Args:
        c: key under which the result is stored in the returned dict.
        i: scan-result dict: target address under ``i['target']['ip'/'port']``,
           bucket XML listing under ``i['result']['data']['response']['body']['content']``.
        search: search label stored on the created ``Amazons3be`` record.
        keyword: substring matched case-insensitively against object keys;
            matching keys are stored as indicators.
        config: settings dict; ``config['config']['blacklist']`` lists IPs to skip.

    Returns:
        ``{c: {"ip": ..., "port": ..., 'files': [...]}}`` on success, or ``{}``
        when the IP is already recorded/blacklisted or parsing fails.
    """
    return_dict = {}
    myparser = jxmlease.Parser()
    ip = i['target']['ip']
    port = i['target']['port']
    files = []
    indicators = []
    # Skip hosts already recorded or explicitly blacklisted.
    if Amazons3be.objects.filter(ip=ip).exists() or ip in config['config']['blacklist']:
        return return_dict
    try:
        root = myparser(i['result']['data']['response']['body']['content'])
        if 'Contents' in root['ListBucketResult']:
            # Loop variable renamed from `i`, which shadowed the parameter.
            for counter, obj in enumerate(root['ListBucketResult']['Contents']):
                key = str(obj['Key'])
                if keyword in key.lower():
                    indicators.append(key)
                if counter < 50:  # cap the stored file list at the first 50 keys
                    files.append(key)
        # NOTE(review): nesting inferred — the record is saved even for
        # listings without 'Contents'; confirm against the original layout.
        device = Amazons3be(search=search, ip=ip, port=port, files=files,
                            indicator=indicators)
        device.save()
        return_dict[c] = {"ip": ip, "port": port, 'files': files}
    except Exception as e:
        # Best-effort: malformed XML / unexpected layout yields an empty result.
        return_dict = {}
        print(e)
    return return_dict
def sys_license_keys(host):
    """Print every license key installed on a Junos device.

    Connects over NETCONF, runs the ``get-license-key-information`` RPC and
    prints the raw ``key-data`` for each installed license, or a notice when
    none are installed.

    Args:
        host: hostname or IP address of the target device.

    Relies on module-level ``username`` and ``password`` credentials.
    """
    dev = Device(host=host, user=username, password=password, normalize=True)
    dev.open()
    try:
        rpc = dev.rpc.get_license_key_information()
        rpc_xml = etree.tostring(rpc, pretty_print=True, encoding='unicode')
    finally:
        # Close the NETCONF session even if the RPC fails.
        dev.close()
    result = jxmlease.parse(rpc_xml)

    banner = '\n' + 120 * '*' + '\n'
    print(banner)
    print('License keys for host {}'.format(host))
    print(banner)

    key_info = result.get('license-key-information')
    if not key_info:
        # Device reports no installed licenses.
        print('No license found')
        return

    keys = key_info['license-key']
    # jxmlease returns a list for repeated elements, a single node otherwise.
    if isinstance(keys, list):
        for lic in keys:  # renamed: `license` shadowed a builtin
            print(lic['key-data'])
    else:
        print(keys['key-data'])
def sys_hardware(host, inv):
    """Collect chassis hardware inventory from a Junos device as CSV rows.

    Connects to ``host``, runs the ``get-chassis-inventory`` RPC and appends
    one line per hardware item to ``inv``:
    ``hostname,chassis_type,model,name,serial``.

    Routing Engines are skipped, as are sub-modules with a BUILTIN or
    already-recorded serial number. Relies on module-level ``username`` and
    ``password`` credentials.

    Args:
        host: hostname or IP address of the target device.
        inv: list of CSV strings, mutated in place.

    Returns:
        The (mutated) ``inv`` list.
    """
    dev = Device(host=host, user=username, password=password, normalize=True)
    dev.open()
    hostname = str(dev.facts['hostname'])
    print('Connecting to {} \n'.format(hostname))
    rpc = dev.rpc.get_chassis_inventory()
    rpc_xml = etree.tostring(rpc, pretty_print=True, encoding='unicode')
    dev.close()
    result = jxmlease.parse(rpc_xml)

    # Hoisted: the chassis node is referenced repeatedly below.
    chassis = result['chassis-inventory']['chassis']
    chassis_type = str(chassis['description'])

    # Modular chassis: at least one module carries sub-modules.
    if any('chassis-sub-module' in modules
           for modules in chassis['chassis-module']):
        for modules in chassis['chassis-module']:
            # Skip Routing Engines ("Routing Engine 0", "Routing Engine 1", ...).
            if re.match(r'Routing Engine \d', str(modules.get('name'))) is None:
                # Fixed: str() previously wrapped the whole concatenation
                # instead of modules['model-number'] alone.
                inv.append(hostname + ',' + chassis_type + ',' +
                           str(modules['model-number']) + ',' +
                           str(modules['name']) + ',' +
                           str(modules['serial-number']))
            if modules.get('chassis-sub-module'):
                for submodules in modules.get('chassis-sub-module'):
                    if submodules.get('chassis-sub-sub-module'):
                        for items in submodules.get('chassis-sub-sub-module'):
                            inv.append(hostname + ',' + chassis_type + ',' +
                                       str(items['description']) + ',' +
                                       str(items['name']) + ',' +
                                       str(items['serial-number']))
                    elif (submodules.get('serial-number') and
                          str(submodules.get('serial-number')) != 'BUILTIN'):
                        # De-duplicate on serial numbers already present in inv.
                        if any(str(submodules.get('serial-number')) in invent
                               for invent in inv):
                            continue
                        inv.append(hostname + ',' + chassis_type + ',' +
                                   str(submodules['model-number']) + ',' +
                                   str(submodules['name']) + ',' +
                                   str(submodules['serial-number']))
    else:
        # Fixed-form chassis: record the chassis itself (description doubles
        # as both model and name fields here, as in the original).
        inv.append(hostname + ',' + chassis_type + ',' +
                   str(chassis.get('description')) + ',' +
                   str(chassis.get('description')) + ',' +
                   str(chassis.get('serial-number')))
    return inv
import getpass
import requests
import jxmlease
import jnpr.junos
import Device  # NOTE(review): likely meant `from jnpr.junos import Device` — confirm
import xml.etree.ElementTree as ET
from datetime import datetime
import yaml, argparse, jinja2
from lxml import etree

# Junos REST API endpoint templates (single- and multiple-RPC forms).
SHEME = 'http'  # NOTE(review): typo — defined as SHEME but read as SCHEME below (NameError at import)
PORT = 3000
# NOTE(review): besides the undefined SCHEME, '//%s' looks like it is missing
# the '://' separator, and MULTIPLE_... lacks ':' before the port — verify.
SINGLE_RPC_URL_FORMAT = SCHEME + '//%s:' + str(PORT) + '/rpc/%s@format=%s'
MULTIPLE_RPC_URL_FORMAT = SCHEME + '//%s' + str(PORT) + '/rpc'

# Shared parser for responses delivered as application/xml.
parser = jxmlease.Parser()


def get_lldp_information(device, user, passwd):
    """Fetch LLDP neighbor information from *device* via the Junos REST API.

    Requests the ``get-lldp-neighbors-information`` RPC as JSON with HTTP
    basic auth; an ``application/xml`` reply signals an RPC warning/error
    payload, which is checked and ``None`` returned.

    NOTE(review): this definition is truncated in this chunk (it ends at a
    bare ``try:``); documented only as far as visible.
    """
    url = SINGLE_RPC_URL_FORMAT % (device, 'get-lldp-neighbors-information', 'json')
    http_resp = requests.get(url, auth = (user, passwd))
    http_resp.raise_for_status()
    # XML content type => RPC error/warning rather than the requested JSON.
    if http_resp.headers['Content-Type'].startswith('application/xml'):
        _ = check_for_warnings_and_errors(parser(http_resp.text))
        return None
    resp = http_resp.json()
    lldp_info = {}
    try:
"""Dump system statistics counters from a Junos device."""
from jnpr.junos import Device
from dictdiffer import diff
from lxml import etree
import jxmlease

# NOTE(review): hardcoded credentials — move to getpass/env vars for real use.
dev = Device(host='172.27.14.72', user='******', passwd='jun2per',
             normalize=True)
dev.open()
rpc1 = dev.rpc.get_statistics_information()
rpc_xml1 = etree.tostring(rpc1, encoding='unicode', pretty_print=True)
dev.close()  # added: the session was never closed
result1 = jxmlease.parse(rpc_xml1)

# result1 maps 'statistics' -> {protocol -> {counter -> value}} (jxmlease
# nodes). Fixed: the original unpacked .items() 2-tuples into three names
# (ValueError) and referenced an undefined `key` (NameError).
for table, protocols in result1.items():
    print("\nStatistics: ", table)
    for proto_name, counters in protocols.items():
        print('\nProtocols: ', proto_name)
        # Leaf protocols may be scalar text nodes rather than dicts of counters.
        if not hasattr(counters, 'items'):
            continue
        for counter_name, value in counters.items():
            print(counter_name + ': ', value)
def load_content(self):
    """ Download the content of ECMDB and store it to a local sqlite database.

    Fetches the compound index (a zipped JSON file), then downloads each
    compound's XML detail page, parses it with jxmlease and persists
    Compound records — with synonyms, compartments, concentrations and
    cross-references — through the SQLAlchemy-style session in
    ``self.session``. Progress is printed when ``self.verbose`` is set;
    at most ``self.max_entries`` compounds are processed.
    """
    # ORM session for persistence; HTTP session for all downloads.
    db_session = self.session
    req_session = self.requests_session

    # download content from server
    if self.verbose:
        print('Downloading compound IDs ...')
    response = req_session.get(self.DOWNLOAD_INDEX_URL)
    response.raise_for_status()
    if self.verbose:
        print(' done')

    # unzip and parse content: the index is a zip containing ecmdb.json
    if self.verbose:
        print('Parsing compound IDs ...')
    with zipfile.ZipFile(io.BytesIO(response.content), 'r') as zip_file:
        with zip_file.open('ecmdb.json', 'r') as json_file:
            entries = json.load(json_file)
    if self.verbose:
        print(' found {} compounds'.format(len(entries)))

    # sort entries deterministically by ECMDB accession (m2m_id)
    entries.sort(key=lambda e: e['m2m_id'])

    # limit number of processed entries
    if len(entries) > self.max_entries:
        entries = entries[0:self.max_entries]

    # load content into sqlite database
    if self.verbose:
        print('Downloading {} compounds ...'.format(len(entries)))
    xml_parser = jxmlease.Parser()
    for i_entry, entry in enumerate(entries):
        if self.verbose and (i_entry % 10 == 0):
            print(' Downloading compound {} of {}'.format(
                i_entry + 1, len(entries)))

        # get details for one compound (XML document)
        response = req_session.get(
            self.DOWNLOAD_COMPOUND_URL.format(entry['m2m_id']))
        try:
            response.raise_for_status()
        except requests.exceptions.HTTPError:
            # Best effort: warn and skip compounds whose page can't be fetched.
            warnings.warn(
                'Unable to download data for compound {}'.format(
                    entry['m2m_id']), data_source.DataSourceWarning)
            continue

        entry_details = xml_parser(response.text)['compound']

        compound = self.get_or_create_object(Compound, id=self.get_node_text(
            entry_details['m2m_id']))
        if 'name' in entry_details:
            compound.name = self.get_node_text(entry_details['name'])
        if 'description' in entry_details:
            compound.description = self.get_node_text(
                entry_details['description'])
        compound.structure = self.get_node_text(entry_details['inchi'])
        if not compound.structure:
            # No InChI in the XML: fall back to the dedicated structure
            # endpoint (response body used verbatim as the structure).
            response2 = req_session.get(
                self.DOWNLOAD_COMPOUND_STRUCTURE_URL.format(
                    entry['m2m_id']))
            response2.raise_for_status()
            compound.structure = response2.text
        compound.comment = entry['comment']
        # Timestamps are stored naive (tzinfo stripped).
        compound.created = dateutil.parser.parse(
            self.get_node_text(
                entry_details['creation_date'])).replace(tzinfo=None)
        compound.updated = dateutil.parser.parse(
            self.get_node_text(
                entry_details['update_date'])).replace(tzinfo=None)

        # calculate core InChI layers to facilitate searching
        try:
            compound._structure_formula_connectivity = molecule_util.InchiMolecule(compound.structure) \
                .get_formula_and_connectivity()
        except ValueError:
            warnings.warn(
                'Unable to encode structure for {} in InChI'.format(
                    entry['m2m_id']), data_source.DataSourceWarning)
            compound._structure_formula_connectivity = None

        # synonyms (IUPAC names plus explicit <synonym> children)
        compound.synonyms = []
        if 'iupac_name' in entry_details:
            node = entry_details['iupac_name']
            name = self.get_node_text(node)
            compound.synonyms.append(
                self.get_or_create_object(Synonym, name=name))
        if 'traditional_iupac' in entry_details:
            node = entry_details['traditional_iupac']
            name = self.get_node_text(node)
            compound.synonyms.append(
                self.get_or_create_object(Synonym, name=name))
        parent_node = entry_details['synonyms']
        if 'synonym' in parent_node:
            nodes = self.get_node_children(parent_node, 'synonym')
            for node in nodes:
                name = self.get_node_text(node)
                compound.synonyms.append(
                    self.get_or_create_object(Synonym, name=name))

        # locations (cellular compartments)
        compound.compartments = []
        parent_node = entry_details['cellular_locations']
        if 'cellular_location' in parent_node:
            nodes = self.get_node_children(parent_node, 'cellular_location')
            for node in nodes:
                name = self.get_node_text(node)
                compound.compartments.append(
                    self.get_or_create_object(Compartment, name=name))

        # todo (enhancement): parse experimental properties
        # * state
        # * melting_point
        # * water_solubility
        # * logp_hydrophobicity

        # concentrations
        compound.concentrations = []
        parent_node = entry_details['concentrations']
        if 'concentration' in parent_node:
            # NOTE(review): the per-field lists below are assumed to be
            # index-aligned (one entry per measurement) — confirm this holds
            # for the ECMDB schema.
            values = self.get_node_children(parent_node, 'concentration')
            errors = self.get_node_children(parent_node, 'error')
            units = self.get_node_children(parent_node, 'concentration_units')
            strains = self.get_node_children(parent_node, 'strain')
            statuses = self.get_node_children(parent_node, 'growth_status')
            medias = self.get_node_children(parent_node, 'growth_media')
            temperatures = self.get_node_children(parent_node, 'temperature')
            systems = self.get_node_children(parent_node, 'growth_system')
            references = self.get_node_children(parent_node, 'reference')

            for i_conc in range(len(values)):
                value = float(self.get_node_text(values[i_conc]))
                # Missing error text becomes NaN rather than raising.
                error = float(self.get_node_text(errors[i_conc]) or 'nan')
                unit = self.get_node_text(units[i_conc])
                # Only micromolar concentrations are supported.
                if unit == 'uM':
                    pass
                else:
                    raise ValueError('Unsupport units: {}'.format(unit))

                if temperatures[i_conc]:
                    # Temperature text is "<value> <unit>", e.g. "37 oC".
                    temperature, unit = self.get_node_text(
                        temperatures[i_conc]).split(' ')
                    temperature = float(temperature)
                    if unit != 'oC':
                        raise ValueError(
                            'Unsupport units: {}'.format(unit))
                else:
                    temperature = None

                concentration = Concentration(
                    value=value,
                    error=error,
                    strain=self.get_node_text(strains[i_conc]) or None,
                    growth_status=self.get_node_text(statuses[i_conc]) or None,
                    media=self.get_node_text(medias[i_conc]) or None,
                    temperature=temperature,
                    growth_system=self.get_node_text(systems[i_conc]) or None,
                )
                db_session.add(concentration)

                # Link PubMed references cited for this measurement.
                if 'pubmed_id' in references[i_conc]:
                    pmid_nodes = self.get_node_children(
                        references[i_conc], 'pubmed_id')
                    for node in pmid_nodes:
                        id = self.get_node_text(node)
                        concentration.references.append(
                            self.get_or_create_object(Resource, namespace='pubmed', id=id))

                compound.concentrations.append(concentration)

        # cross references: one Resource per external database id present
        compound.cross_references = []

        id = self.get_node_text(entry_details['biocyc_id'])
        if id:
            compound.cross_references.append(
                self.get_or_create_object(Resource, namespace='biocyc', id=id))

        id = self.get_node_text(entry_details['cas_registry_number'])
        if id:
            compound.cross_references.append(
                self.get_or_create_object(Resource, namespace='cas', id=id))

        id = self.get_node_text(entry_details['chebi_id'])
        if id:
            compound.cross_references.append(
                self.get_or_create_object(Resource, namespace='chebi', id='CHEBI:' + id))

        id = self.get_node_text(entry_details['chemspider_id'])
        if id:
            compound.cross_references.append(
                self.get_or_create_object(Resource, namespace='chemspider', id=id))

        id = self.get_node_text(entry_details['foodb_id'])
        if id:
            compound.cross_references.append(
                self.get_or_create_object(Resource, namespace='foodb.compound', id=id))

        id = self.get_node_text(entry_details['het_id'])
        if id:
            compound.cross_references.append(
                self.get_or_create_object(Resource, namespace='ligandexpo', id=id))

        id = self.get_node_text(entry_details['hmdb_id'])
        if id:
            compound.cross_references.append(
                self.get_or_create_object(Resource, namespace='hmdb', id=id))

        id = self.get_node_text(entry_details['kegg_id'])
        if id:
            compound.cross_references.append(
                self.get_or_create_object(Resource, namespace='kegg.compound', id=id))

        id = self.get_node_text(entry_details['msds_url'])
        if id:
            compound.cross_references.append(
                self.get_or_create_object(Resource, namespace='msds.url', id=id))

        id = self.get_node_text(entry_details['pubchem_compound_id'])
        if id:
            compound.cross_references.append(
                self.get_or_create_object(Resource, namespace='pubchem.compound', id=id))

        # NOTE(review): 'wikipidia' is presumably the (misspelled) tag name
        # in the ECMDB XML itself — do not "fix" this key without checking.
        id = self.get_node_text(entry_details['wikipidia'])
        if id:
            compound.cross_references.append(
                self.get_or_create_object(Resource, namespace='wikipedia.en', id=id))

        # add to session
        db_session.add(compound)

        # Periodic commit every 100 compounds to bound memory/transaction size.
        if self.commit_intermediate_results and (i_entry % 100 == 99):
            db_session.commit()
    if self.verbose:
        print(' done')

    # commit changes to database
    if self.verbose:
        print('Saving database ...')
    db_session.commit()
    if self.verbose:
        print(' done')