@author: Kurach Aleksandr <*****@*****.**> @license: GPL @summary: Warranty check for PyLXCA ''' import sys import requests from requests.auth import HTTPBasicAuth import simplejson as json import objectpath from variable import * r = requests.get(ip + url_warranty, auth=HTTPBasicAuth(user, passwd), verify=False) d = json.loads(r.text) for item in d: print("Chassis Name: " + item['deviceName']) # get chasiss print("Status: " + item['status']) print("End Date: " + item['endDate']) print() tr = objectpath.Tree(item['children']) t2 = tuple(tr.execute('$..children')) for device in t2: if 'deviceType' in device: print("Node Name: " + device['deviceName']) print("sn: " + device['serialNumber']) print("Status: " + device['status']) print("End Date: " + device['endDate']) print()
def __init__(self, json_dict):
    """Index *json_dict* with an ObjectPath tree and start an empty statement list."""
    self.statements = []
    self.tree = objectpath.Tree(json_dict)
def get_weather_info(data_list, arg):
    """Evaluate the ObjectPath query *arg* over *data_list* and return the result."""
    return objectpath.Tree(data_list).execute(arg)
def __init__(self, json_data):
    """Wrap *json_data* in an ObjectPath tree for later queries."""
    tree = objectpath.Tree(json_data)
    self.tree = tree
*option to create new output.txt or use pre-existing one. """ #python imports import requests import objectpath #startup print('Gathering possible Apple phishing sites') url = 'https://urlscan.io/api/v1/search/' params = dict( q = 'PhishTank OR OpenPhish OR CertStream-Suspicious', size = '1000') response = requests.get(url=url, params=params).json() #parsing the json for domain values results_tree = objectpath.Tree(response['results']) domain_search = tuple(results_tree.execute('$..domain')) #searching if any apple related words in domain, if so print #add more search keys with open('Output.txt' , 'w') as file: for i in domain_search: if ('apple') in i: file.write(i + '\n') elif ('icloud') in i: file.write(i + '\n') print('Done, domains in Output.txt') #checking if domain is registered with 16Shop C2 print('Checking domains against known 16Shop C2s') bad_actor = []
def checkType(self, tweet, item):
    """Append the media type of *tweet* to self.tweet_info[item].

    Runs the ObjectPath query '$..type' over the tweet payload and records
    the last match; when the query yields nothing usable the tweet is
    treated as plain text and 'text' is recorded instead.
    """
    tree_obj = objectpath.Tree(tweet)
    try:
        self.tweet_info[item].append(list(tree_obj.execute('$..type'))[-1])
    except Exception:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt and
        # SystemExit; Exception still covers the expected failures here
        # (non-iterable query result -> TypeError, empty result -> IndexError).
        self.tweet_info[item].append('text')
import re
import objectpath
import pytz
import sys

# Translation table mapping every code point from U+1000 upward to the
# replacement character, so tweets survive printing on consoles that
# cannot encode them.
non_bmp = dict.fromkeys(range(0x1000, sys.maxunicode + 1), 0xfffd)

TWITTER_URL = 'https://api.twitter.com/1.1/search/tweets.json?'

# NOTE(review): TLS verification is disabled for the API call — insecure;
# ssl / twurl / ur / json are presumably imported earlier in the file.
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE

# Interactive loop: one Twitter search per query, empty input quits.
while True:
    print('')
    query = input('Enter the query:')
    if len(query) < 1:
        break
    url = twurl.augment(TWITTER_URL, {'q': query, 'count': '100'})
    con = ur.urlopen(url, context=ctx)
    jas = json.loads(con.read().decode())
    tree_obj = objectpath.Tree(jas)
    tweets = list(tree_obj.execute('$..text'))
    for x in tweets:
        try:
            print(x)
        except UnicodeEncodeError:
            # Console cannot encode this tweet; degrade exotic characters.
            # (Narrowed from a bare `except:`.)
            print(x.translate(non_bmp))
    # BUG FIX: was `connection.getheaders()` — `connection` is never
    # defined anywhere in this script; the response object is `con`,
    # so every iteration raised NameError here.
    headers = dict(con.getheaders())
    print('Remaining', headers['x-rate-limit-remaining'])
def createFlowMultiSubnet():
    """Generate OpenDaylight flow-programming JSON files for a multi-subnet topology.

    Reads the inventory dump 'dataNodesNew.json' and, for every switch, every
    port that is not 'PORT-DOWN', and every host address the address tracker
    has seen on that port, writes one JSON file per flow type (IP forwarding,
    ARP steering, gateway ARP responder, L2/L3 go-to-table glue, registration)
    into per-type output directories that are recreated from scratch each run.

    NOTE(review): recovered from whitespace-collapsed source; the indentation
    below is a reconstruction. In particular, the assumption that ONLY the
    registration flow is guarded by `countAddress == 1` (and that `idNum` is
    incremented once per host address) should be confirmed against the
    original file.
    """
    dirpath = os.getcwd()

    def num_to_directory(argument):
        # Map an index 0..6 to an output directory path; returns the
        # STRING 'false' (not the boolean False) for any other index.
        switcher = {
            0: dirpath + '/dataFlowARPGw',
            1: dirpath + '/dataFlowARP',
            2: dirpath + '/dataFlowIP',
            3: dirpath + '/dataFlowL2',
            4: dirpath + '/dataFlowL3',
            5: dirpath + '/dataFlowRegistrasi',
            6: dirpath + '/dataFlowARPL2'
        }
        return switcher.get(argument, 'false')

    # Wipe and recreate every output directory so each run starts clean.
    for countDir in range(0, 7):
        directory = num_to_directory(countDir)
        if os.path.exists(directory):
            shutil.rmtree(directory)
        if not os.path.exists(directory):
            os.makedirs(directory)

    with open('dataNodesNew.json') as fh:
        data = json.load(fh)  #get file json Nodes

    countNodes = len(data['nodes']['node'])  #count how many nodes are there
    print(countNodes)
    for nodes in range(0, countNodes):  #loop for the amount of switch is connected to the controller
        countNodeConnected = len(
            data['nodes']['node'][nodes]['node-connector'])  #count how many ports are on the switch
        print(countNodeConnected)
        idNum = 0  #ID for the flow
        nodeID = data['nodes']['node'][nodes]['id']  #get the ID of the switch
        print(nodeID)
        nodeIDName = nodeID.replace(
            'openflow:', 'S')  #this is used for the file name for each flow, to identify each flow for which switch ID
        print(nodeIDName)
        for x in range(0, countNodeConnected):  #loop for the amount of ports there are on one switch
            connectorConfiguration = data['nodes']['node'][nodes][
                'node-connector'][x][
                    'flow-node-inventory:configuration']  #this is to detect the port is down or not
            if (connectorConfiguration != 'PORT-DOWN'):  #if the port is not down, then we create the flow
                if ('address-tracker:addresses' in
                        data['nodes']['node'][nodes]['node-connector'][x]):
                    jsonnn_tree = objectpath.Tree(
                        data['nodes']['node'][nodes]['node-connector'][x])
                    c = "$.'address-tracker:addresses'.ip"  #this is the location in the json file for IP address from each devices that connected to the switch
                    result_IP = tuple(
                        jsonnn_tree.execute(c))  #get all IP address from the json file and make it as tuple
                    countAddress = len(
                        data['nodes']['node'][nodes]['node-connector'][x]
                        ['address-tracker:addresses'])  #count how many devices is connected to the switch on one port
                    print('Number of Devices : ' + str(countAddress))
                    for address in range(0, countAddress):  #loop for the amount of the devices
                        IP = ''.join(
                            result_IP[address])  #IP address of a single device as a string; used to look up the MAC and port for that IP
                        print(IP)
                        a = "$.'address-tracker:addresses'[@.'ip' is '" + IP + "'].mac"  #get the mac address for the specific IP address
                        nodeConnector = data['nodes']['node'][nodes][
                            'node-connector'][x][
                                'flow-node-inventory:port-number']  #get the port number for the specific IP address
                        hardwareAddress = data['nodes']['node'][nodes][
                            'node-connector'][x][
                                'flow-node-inventory:hardware-address']
                        result_tuple1 = tuple(jsonnn_tree.execute(a))
                        print(result_tuple1)
                        macAddress = ''.join(result_tuple1)
                        print(macAddress)
                        print('Connected to Port : ' + str(nodeConnector))
                        # Derive the gateway (.254) and network (.0) addresses
                        # from the host IP by rewriting the last octet.
                        IParray = IP.split('.')
                        IParray[3] = '254'
                        IPgw = '.'.join(
                            IParray)  #assuming the default gateway always ends with 254 (ex: 10.0.0.254)
                        IParray[3] = '0'
                        IPnet = '.'.join(IParray)
                        print('Banyaknya node : ', countAddress)  # "number of nodes" (Indonesian)
                        if (countAddress == 1):
                            #2nd Flow: registration — table 0 sends traffic from this
                            #known source MAC on to table 1.
                            putJson = {
                                "flow": [{
                                    "flow-name": "FlowS",
                                    "id": idNum,
                                    "instructions": {
                                        "instruction": [{
                                            "go-to-table": {
                                                "table_id": 1
                                            },
                                            "order": 0
                                        }]
                                    },
                                    "match": {
                                        "ethernet-match": {
                                            "ethernet-source": {
                                                "address": macAddress
                                            }
                                        }
                                    },
                                    "priority": 100,
                                    "table_id": 0
                                }]
                            }
                            with open(
                                    'dataFlowRegistrasi/dataFlowRegistrasi' +
                                    nodeIDName + '-' + str(x) + '-' +
                                    str(address) + '.json', 'w') as outfile:
                                json.dump(putJson, outfile, sort_keys=True, indent=4)
                        #Creating the Flow:
                        #1st Flow: table 20 rewrites the destination MAC and
                        #outputs to the host's port for its /32 IP.
                        putJson = {
                            "flow": [{
                                "id": idNum,
                                "priority": 10,
                                "flow-name": "FlowIP" + str(x),
                                "match": {
                                    "ethernet-match": {
                                        "ethernet-type": {
                                            "type": 2048
                                        }
                                    },
                                    "ipv4-destination": IP + "/32"
                                },
                                "table_id": 20,
                                "instructions": {
                                    "instruction": [{
                                        "order": 0,
                                        "apply-actions": {
                                            "action": [{
                                                "order": 0,
                                                "set-dl-dst-action": {
                                                    "address": macAddress
                                                }
                                            }, {
                                                "order": 1,
                                                "output-action": {
                                                    "output-node-connector": nodeConnector
                                                }
                                            }]
                                        }
                                    }]
                                }
                            }]
                        }
                        with open(
                                'dataFlowIP/dataFlowIP' + nodeIDName + '-' +
                                str(x) + '-' + str(address) + '.json',
                                'w') as outfile:
                            json.dump(putJson, outfile, sort_keys=True, indent=4)  #make it as a file
                        #2nd Flow: table 1 steers ARP (ethertype 0x0806) for the
                        #gateway address on to table 10.
                        putJson = {
                            "flow": [{
                                "flow-name": "FlowS",
                                "id": idNum + 200,
                                "instructions": {
                                    "instruction": [{
                                        "go-to-table": {
                                            "table_id": 10
                                        },
                                        "order": 0
                                    }]
                                },
                                "match": {
                                    "ethernet-match": {
                                        "ethernet-type": {
                                            "type": 2054
                                        }
                                    },
                                    "arp-target-transport-address": IPgw + "/32"
                                },
                                "priority": 100,
                                "table_id": 1
                            }]
                        }
                        with open(
                                'dataFlowARP/dataFlowARP' + nodeIDName + '-' +
                                str(x) + '-' + str(address) + '.json',
                                'w') as outfile:
                            json.dump(putJson, outfile, sort_keys=True, indent=4)
                        #3rd Flow: table 10 answers ARP requests for the gateway
                        #in-place (Nicira register moves swap src/dst fields,
                        #set-field writes the switch port MAC, arp-op 2 = reply)
                        #and sends the reply back out the ingress port.
                        putJson = {
                            "flow": [{
                                "id": idNum,
                                "priority": 100,
                                "flow-name": "FlowARP" + str(x),
                                "match": {
                                    "ethernet-match": {
                                        "ethernet-type": {
                                            "type": 2054
                                        }
                                    },
                                    "arp-target-transport-address": IPgw + "/32",
                                },
                                "table_id": 10,
                                "instructions": {
                                    "instruction": [{
                                        "order": 0,
                                        "apply-actions": {
                                            "action": [{
                                                "openflowplugin-extension-nicira-action:nx-reg-move": {
                                                    "dst": {
                                                        "end": 47,
                                                        "of-eth-dst": [None],
                                                        "start": 0
                                                    },
                                                    "src": {
                                                        "end": 47,
                                                        "of-eth-src": [None],
                                                        "start": 0
                                                    }
                                                },
                                                "order": 0
                                            }, {
                                                "openflowplugin-extension-nicira-action:nx-reg-move": {
                                                    "dst": {
                                                        "end": 47,
                                                        "nx-arp-tha": [None],
                                                        "start": 0
                                                    },
                                                    "src": {
                                                        "end": 47,
                                                        "nx-arp-sha": [None],
                                                        "start": 0
                                                    }
                                                },
                                                "order": 1
                                            }, {
                                                "order": 2,
                                                "set-field": {
                                                    "arp-source-hardware-address": {
                                                        "address": hardwareAddress
                                                    }
                                                }
                                            }, {
                                                "order": 3,
                                                "set-field": {
                                                    "arp-op": 2
                                                }
                                            }, {
                                                "order": 4,
                                                "set-field": {
                                                    "ethernet-match": {
                                                        "ethernet-source": {
                                                            "address": hardwareAddress
                                                        }
                                                    }
                                                }
                                            }, {
                                                "openflowplugin-extension-nicira-action:nx-reg-move": {
                                                    "dst": {
                                                        "end": 31,
                                                        "of-arp-tpa": [None],
                                                        "start": 0
                                                    },
                                                    "src": {
                                                        "end": 31,
                                                        "of-arp-spa": [None],
                                                        "start": 0
                                                    }
                                                },
                                                "order": 5
                                            }, {
                                                "order": 6,
                                                "set-field": {
                                                    "arp-source-transport-address": IPgw + "/32"
                                                }
                                            }, {
                                                "order": 7,
                                                "output-action": {
                                                    "max-length": 0,
                                                    "output-node-connector": "IN_PORT"
                                                }
                                            }]
                                        }
                                    }]
                                }
                            }]
                        }
                        with open(
                                'dataFlowARPGw/dataFlowARPGw' + nodeIDName +
                                '-' + str(x) + '-' + str(address) + '.json',
                                'w') as outfile:
                            json.dump(putJson, outfile, sort_keys=True, indent=4)
                        #5th Flow: table 1 routes IPv4 to this host — rewrite
                        #source MAC to the switch port, decrement TTL, then
                        #continue in table 20.
                        putJson = {
                            "flow": [{
                                "id": idNum + 100,
                                "priority": 10,
                                "flow-name": "FlowARP" + str(x),
                                "match": {
                                    "ethernet-match": {
                                        "ethernet-type": {
                                            "type": 2048
                                        }
                                    },
                                    "ipv4-destination": IP + "/32"
                                },
                                "table_id": 1,
                                "instructions": {
                                    "instruction": [{
                                        "order": 0,
                                        "go-to-table": {
                                            "table_id": 20
                                        }
                                    }, {
                                        "order": 1,
                                        "apply-actions": {
                                            "action": [{
                                                "order": 0,
                                                "set-dl-src-action": {
                                                    "address": hardwareAddress
                                                }
                                            }, {
                                                "order": 1,
                                                "dec-nw-ttl": {}
                                            }]
                                        }
                                    }]
                                }
                            }]
                        }
                        with open(
                                'dataFlowL3/dataFlowL3' + nodeIDName + '-' +
                                str(x) + '-' + str(address) + '.json',
                                'w') as outfile:
                            json.dump(putJson, outfile, sort_keys=True, indent=4)
                        #4th Flow: same-subnet traffic (src and dst both in
                        #this /24) skips routing and goes straight to table 20.
                        putJson = {
                            "flow": [{
                                "flow-name": "FlowL2",
                                "id": idNum,
                                "instructions": {
                                    "instruction": [{
                                        "go-to-table": {
                                            "table_id": 20
                                        },
                                        "order": 0
                                    }]
                                },
                                "match": {
                                    "ethernet-match": {
                                        "ethernet-type": {
                                            "type": 2048
                                        }
                                    },
                                    "ipv4-destination": IPnet + "/24",
                                    "ipv4-source": IPnet + "/24"
                                },
                                "priority": 50,
                                "table_id": 1
                            }]
                        }
                        with open(
                                'dataFlowL2/dataFlowL2' + nodeIDName + '-' +
                                str(x) + '-' + str(address) + '.json',
                                'w') as outfile:
                            json.dump(putJson, outfile, sort_keys=True, indent=4)
                        #4th Flow: L2 ARP delivery — table 30 outputs frames
                        #destined for this host's MAC to its port.
                        putJson = {
                            "flow": [{
                                "flow-name": "FlowARPL2",
                                "id": idNum,
                                "instructions": {
                                    "instruction": [{
                                        "order": 0,
                                        "apply-actions": {
                                            "action": [{
                                                "order": 0,
                                                "output-action": {
                                                    "output-node-connector": nodeConnector
                                                }
                                            }]
                                        }
                                    }]
                                },
                                "match": {
                                    "ethernet-match": {
                                        "ethernet-destination": {
                                            "address": macAddress
                                        }
                                    }
                                },
                                "priority": 100,
                                "table_id": 30
                            }]
                        }
                        with open(
                                'dataFlowARPL2/dataFlowARPL2' + nodeIDName +
                                '-' + str(x) + '-' + str(address) + '.json',
                                'w') as outfile:
                            json.dump(putJson, outfile, sort_keys=True, indent=4)
                        idNum = idNum + 1  #after 1 flow is finished for 1 port increase the ID by 1
def _object_path(obj: object, path: str) -> Iterable:
    """Yield each value produced by evaluating the ObjectPath query *path* on *obj*."""
    matches = objectpath.Tree(obj).execute(path)
    for match in matches:
        yield match
def getMetadata(artist, release):
    """ Data from public database Music brainz

    Parameters
    ----------
    artist : str
        Name of artist to search in database
    release : str
        Name of song=release from artist for searching

    Returns
    -------
    dict : information for table, with data about song
    """
    # accumulated result
    finalList = {}
    # identify this client to the MusicBrainz web service (required by its ToS)
    musicbrainzngs.set_useragent(
        "python-musicbrainzngs-example",
        "0.1",
        "https://github.com/alastair/python-musicbrainzngs/",
    )
    # limit=1 -> at most one release comes back
    result = musicbrainzngs.search_releases(artist=artist, tracks=release, limit=1)
    # (removed dead code: a json.dumps/json.loads round-trip whose result
    # `wjson`, the `jsonnn_tree` built from it, the `IDval` accumulator and
    # both enumerate() indices were never used)
    for rel in result['release-list']:  # loop var renamed: no longer shadows the `release` parameter
        # simple scalar fields copy straight through under the same key
        for key in ('date', 'country', 'title', 'packaging', 'barcode', 'status'):
            if key in rel:
                finalList[key] = rel[key]
        if 'id' in rel:
            finalList["Release ID"] = rel['id']
        if 'text-representation' in rel:
            repre = rel['text-representation']
            if 'language' in repre:
                finalList["language"] = repre['language']
            if 'script' in repre:
                finalList["script"] = repre['script']
        if 'artist-credit' in rel:
            try:
                # first credited artist; keys become "Artist <field>" entries
                tree = objectpath.Tree(rel['artist-credit'])
                ent = tree.execute("$.artist[0]")
                for x in ent:
                    finalList["Artist " + str(x)] = ent[x]
            except Exception:
                # best-effort: malformed artist credits are simply skipped
                pass
    return finalList
def players(self, ):
    """Return every player in this league with status "A" (available), parsed via ObjectPath."""
    raw = self.yhandler.get_players_by_status(self.league_id, "A")
    tree = objectpath.Tree(raw)
    return tree.execute('$..(player)')