def search_inventory(value, host=env.TET.get("host"), api_key=env.TET_API_KEY, api_sec=env.TET_SEC):
    """Search Tetration inventory for a single IP address.

    Args:
        value: IP address to look up (exact "eq" match on the "ip" field).
        host: Tetration cluster hostname (default read from env at import time).
        api_key: API key (default read from env at import time).
        api_sec: API secret (default read from env at import time).

    Returns:
        The parsed JSON inventory response when the IP is found.

    Raises:
        SystemExit: when the IP is not found or the API returns non-200.
    """
    # Build URL
    url = f"https://{host}"
    restclient = RestClient(url, api_key=api_key, api_secret=api_sec, verify=True)
    payload = {"filter": {"type": "eq", "field": "ip", "value": value}}
    # HTTP POST request (the original comment said GET; /inventory/search is a POST)
    response = restclient.post("/inventory/search", json_body=json.dumps(payload))
    # If response code is 200, then return the json response
    if response.status_code == 200:
        inventory = response.json()
        if inventory["results"]:
            return inventory
        # 200 with an empty result set: the IP is unknown to Tetration.
        print(f"\nERROR: IP Address {value} can not be found.")
        # raise SystemExit directly instead of exit(), which depends on the
        # site module being loaded
        raise SystemExit()
    # Any non-200 status is treated as fatal.
    print(f"\nSomething went wrong. Error code {response.status_code}.")
    raise SystemExit()
def get_tetration_tagged_workspaces():
    """Return inventory items annotated as AWS WorkSpaces in the current region.

    Queries /inventory/search for hosts whose user annotations match the
    module-level aws_region and the "WorkSpaces" cloud service, scoped to
    tetration_tenant. Returns the 'results' list on HTTP 200; implicitly
    returns None on any other status.
    """
    client = RestClient(tetration_url,
                        api_key=tetration_api_key,
                        api_secret=tetration_api_secret,
                        verify=False)
    # Both annotation clauses must match (AND).
    tag_filters = [
        {"type": "eq", "field": "user_Location", "value": aws_region},
        {"type": "eq", "field": "user_Cloud Service", "value": "WorkSpaces"},
    ]
    query = {
        "filter": {"type": "and", "filters": tag_filters},
        "scopeName": tetration_tenant,
        "dimensions": ['ip', 'host_uuid', "user_Cloud Service", "user_Location"],
        "limit": 2000,
    }
    response = client.post('/inventory/search', json_body=json.dumps(query))
    if response.status_code == 200:
        return json.loads(response.content)['results']
def app_upload(hostname, api_key, api_secret, source, validate_certs):
    """Create app scopes on a Tetration cluster from a YAML document stream.

    Args:
        hostname: cluster hostname (https:// is prepended).
        api_key / api_secret: API credentials.
        source: path to a YAML file; each document describes one scope.
        validate_certs: passed to RestClient as `verify`.

    Returns:
        (0, result-dict) on success, or (1, error-string) on any API failure.
    """
    result = {"ansible_facts": {'result': []}}
    api_endpoint = 'https://{0}'.format(hostname)
    restclient = RestClient(api_endpoint,
                            api_key=api_key,
                            api_secret=api_secret,
                            verify=validate_certs)
    app_list = []
    with open(source, 'r') as f:
        for data in oyaml.load_all(f):
            app_list.append(data)
    # BUG FIX: the loop used to bind each document to `app` while the body
    # referenced an undefined name `scope` (NameError at runtime); bind the
    # loop variable as `scope` directly.
    for scope in app_list:
        exists = False
        if scope['parent_app_scope_name']:
            resp = restclient.get('/openapi/v1/app_scopes')
            if not resp.status_code == 200:
                return (
                    1,
                    "Error {0}: {1} during connection attempt to {2}/openapi/v1/app_scopes. \n"
                    .format(resp.status_code, resp.reason, api_endpoint))
            current_scopes = json.loads(resp.content)
            # A scope with the same short_name already on the cluster means
            # there is nothing to create.
            for current_scope in current_scopes:
                if current_scope['short_name'] == scope['short_name']:
                    exists = True
            if not exists:
                scope['parent_app_scope_id'] = ParentIDLookup(
                    current_scopes, scope['parent_app_scope_name'])
                scope.pop('parent_app_scope_name')
                print('Posting scope {0} to the cluster'.format(
                    scope['short_name']))
                resp = restclient.post('/openapi/v1/app_scopes',
                                       json_body=json.dumps(scope))
                if not resp.status_code == 200:
                    return (1, "Error {0}: {1} creating scope {2}. \n{3}".format(
                        resp.status_code, resp.reason, scope['short_name'],
                        resp.json()))
                result['ansible_facts']['result'].append(resp.json())
    # "changed" reflects whether anything was actually created.
    result['changed'] = bool(result['ansible_facts']['result'])
    return (0, result)
def upload(hostname, api_key, api_secret, source, validate_certs, owner_scope_id):
    """Upload inventory filters from a YAML document stream to a Tetration cluster.

    Args:
        hostname: cluster hostname (https:// is prepended).
        api_key / api_secret: API credentials.
        source: path to a YAML file; each document is one inventory filter.
        validate_certs: passed to RestClient as `verify`.
        owner_scope_id: app scope id stamped onto every uploaded filter.

    Returns:
        (0, result-dict) on success, or (1, error-string) on any API failure.
    """
    result = {"ansible_facts": {}}
    api_endpoint = 'https://{0}'.format(hostname)
    restclient = RestClient(api_endpoint,
                            api_key=api_key,
                            api_secret=api_secret,
                            verify=validate_certs)
    filter_list = []
    with open(source, 'r') as f:
        for data in oyaml.load_all(f):
            filter_list.append(data)
    # Renamed from `filter` to avoid shadowing the builtin.
    for inv_filter in filter_list:
        inv_filter['app_scope_id'] = owner_scope_id
        # pop with a default so documents exported without id/_id fields do
        # not raise KeyError (the original pop() crashed on such documents).
        inv_filter.pop('id', None)
        inv_filter.pop('_id', None)
        resp = restclient.post('/openapi/v1/filters/inventories',
                               json_body=json.dumps(inv_filter))
        if not resp.status_code == 200:
            return (1,
                    "Error {0}: {1} during connection attempt to {2}/openapi/v1/filters/inventories. \n{3}".format(
                        resp.status_code, resp.reason, api_endpoint, resp.json()))
    result['changed'] = True
    return (0, result)
# NOTE(review): fragment of a larger script — relies on names defined earlier
# in the original file (args, fw, restclient, default_scope_id, tqdm), and
# ends mid-statement (the appended policy dict continues beyond this view).
inventory_filters = {}
print("Creating Firewall Objects as Tetration Filters...")
if not args.f:
    # One Tetration inventory filter per firewall network object: an OR of
    # eq-ip clauses, one per address in the object's ipSet().
    for key in tqdm(fw.networkObjects.keys()):
        filters = []
        for ip in fw.networkObjects[key].ipSet():
            filters.append({"field": "ip", "type": "eq", "value": ip})
        post_data = {
            "name": 'fw_obj_' + key,
            "query": {
                "type": "or",
                "filters": filters
            },
            'app_scope_id': default_scope_id
        }
        resp = restclient.post('/openapi/v1/filters/inventories',
                               json_body=json.dumps(post_data)).json()
        # A dict with an 'error' key signals a failed creation (typically a
        # name collision); abort the lab script entirely.
        if type(resp) is dict and 'error' in resp.keys():
            print('ERROR: ' + resp['error'])
            print(
                'The Inventory Filter {} may already exist. Delete the filter and try again.'
                .format(post_data['name']))
            print('Refer to instructions in the lab guide or ask lab proctor.')
            raise SystemExit
        # Remember the created filter's id keyed by the firewall object name.
        inventory_filters[key] = resp['id']
print("Pushing Access Lists to Tetration for Auditing and Simulation...")
absolute_policies = []
if not args.f:
    # Translate each firewall ACL rule into an absolute policy entry.
    for acl in fw.accessLists.keys():
        for fwrule in fw.accessLists[acl].rules:
            absolute_policies.append({
def main():
    """Ansible module entry point: proxy a single Tetration OpenAPI call.

    Builds '/openapi/<version>/<name>' from the module arguments, invokes it
    with the requested HTTP method, and exits with the response status,
    reason, and body (JSON on 2xx, raw text otherwise).
    """
    module = AnsibleModule(
        argument_spec=dict(
            api_key=dict(type='str', required=True),
            api_secret=dict(type='str', required=True),
            host=dict(type='str', required=True),
            api_version=dict(type='str', default='v1'),
            name=dict(type='str', required=True),
            method=dict(type='str', required=True,
                        choices=['delete', 'get', 'post', 'put']),
            payload=dict(type='dict', required=False),
            params=dict(type='dict', required=False),
        ),
        # we can't predict if the proposed API call will make a change to the system
        supports_check_mode=False
    )
    # if tetpyclient is not available, our only option is to fail
    try:
        import json
        import tetpyclient
        from tetpyclient import RestClient
    except ImportError:
        module.fail_json(msg="Some module dependencies are missing.")

    method = module.params['method']
    api_name = '/openapi/' + module.params['api_version'] + '/' + module.params['name']
    req_payload = module.params['payload']
    restclient = RestClient(
        module.params['host'],
        api_key=module.params['api_key'],
        api_secret=module.params['api_secret'],
        verify=False
    )
    # Do our best to provide "changed" status accurately, but it's not possible
    # as different Tetration APIs react differently to operations like creating
    # an element that already exists.
    changed = False
    if method == 'get':
        response = restclient.get(api_name, params=module.params['params'])
    elif method == 'delete':
        response = restclient.delete(api_name)
        # BUG FIX: `status_code / 100 == 2` is true division on Python 3, so
        # 201/204/etc. compared unequal to 2 and "changed" was wrongly False;
        # floor division restores the intended 2xx check.
        changed = response.status_code // 100 == 2
    elif method == 'post':
        response = restclient.post(api_name, json_body=json.dumps(req_payload))
        changed = response.status_code // 100 == 2
    elif method == 'put':
        response = restclient.put(api_name, json_body=json.dumps(req_payload))
        changed = response.status_code // 100 == 2
    # Put status_code in the return JSON. If the status_code is not 200, we
    # add the text that came from the REST call and the payload to make
    # debugging easier.
    result = {}
    result['status_code'] = response.status_code
    result['ok'] = response.ok
    result['reason'] = response.reason
    if response.status_code // 100 == 2:
        result['json'] = response.json()
    else:
        result['text'] = response.text
    module.exit_json(changed=changed, **result)
# NOTE(review): fragment — the opening of req_payload (its "filter" wrapper)
# is above this view; LIMIT, restclient and IP_FILENAME are defined earlier.
        "filters": [{
            "type": "eq",
            "field": "address_type",
            "value": "IPV4"
        }]
    },
    "scopeName": os.environ['APPSCOPE_NAME'],
    "limit": LIMIT,
    "offset": ""
}
# this block of code gets LIMIT number of IP addresses at a time from Tetration
# and repeats as long as there are more addresses to retrieve
ip_list = []
while True:
    resp = restclient.post('/openapi/v1/inventory/search',
                           json_body=json.dumps(req_payload))
    if resp.status_code == 200:
        parsed_resp = json.loads(resp.content)
        for item in parsed_resp["results"]:
            ip_list.append(item["ip"])
        # Tetration paginates via an "offset" token; its absence means the
        # last page has been read.
        if "offset" in parsed_resp:
            req_payload["offset"] = parsed_resp["offset"]
        else:
            break
    else:
        # NOTE(review): non-200 responses end the loop silently, so an API
        # error is indistinguishable from normal completion here.
        break
# save the results to a file
with open(IP_FILENAME, 'w') as outfile:
# Silence the urllib3 certificate warnings (self-signed cluster cert).
requests.packages.urllib3.disable_warnings()

# Inventory query: hostname contains "p17" OR address within 172.18.17.0/24,
# limited to 15 results in the "Housley" scope.
hostname_clause = {
    "type": "contains",
    "field": "hostname",
    "value": "p17"
}
subnet_clause = {
    "type": "subnet",
    "field": "ip_address",
    "value": "172.18.17.0/24"
}
search_query = {
    "scopeName": "Housley",
    "limit": 15,
    "filter": {
        "type": "or",
        "filters": [hostname_clause, subnet_clause]
    }
}

# POST the query, decode the body, and pretty-print the JSON result.
inventory = client.post('/inventory/search', json_body=json.dumps(search_query))
json_output = json.loads(inventory.text)
pprint(json_output)
def main():
    # NOTE(review): Python 2 code (print statements below). Relies on helpers
    # defined elsewhere in the file: get_parser, my_nslookup, topN_query,
    # CURRENT_POST_ENDPOINT and CURRENT_POST_TOPN_ENDPOINT.
    # Get the CLI arguments
    args = get_parser().parse_args()
    if args.debug:
        print ("\n")
        print ("Credentials file: " + args.credentials)
        print ("Tetration Platform: " + args.platform)
        print ("\n")
    API_ENDPOINT = args.platform
    API_CREDENTIALS = args.credentials
    # The HTTP Body should be sent in JSON format. The Python Client will set the Content-Type as
    # application/json
    now = datetime.now()
    t1 = now  # - timedelta(minutes=360)
    t0 = t1 - timedelta(hours=args.hours)
    dst_port = args.port
    dst_ip = args.provider
    # Used to find all flows for the time window specified. Max is 24 hours, with 1000 records
    CURRENT_POST_PAYLOAD = {
        "t0": t0.strftime('%s'),
        "t1": t1.strftime('%s'),
        "limit": args.limit,
        "filter": {
            "type": "and",
            "filters": []
        }
    }
    # Optional destination-address clause (filter type taken from args.filter).
    if dst_ip is not None:
        CURRENT_POST_PAYLOAD['filter']['filters'].append({
            "type": args.filter,
            "field": "dst_address",
            "value": dst_ip  ##dst_ip
        })
    # Optional destination-port clause (exact match).
    if dst_port is not None:
        CURRENT_POST_PAYLOAD['filter']['filters'].append({
            "type": "eq",
            "field": "dst_port",
            "value": dst_port
        })
    # Used to find the TopN Providers for the specified port
    CURRENT_POST_TOPN_PAYLOAD = {
        "t0": t0.strftime('%s'),
        "t1": t1.strftime('%s'),
        "dimension": "dst_address",
        "metric": "fwd_pkts",
        "filter": {"type": "eq", "field": "dst_port", "value": args.port},
        "threshold": 10,  # the N in the topN
        "scopeName": "Default"
    }
    if args.debug:
        print ("Flow Search Payload to POST: ")
        print json.dumps(CURRENT_POST_PAYLOAD, indent = 4)
        print ("\n\n")
    if args.debug:
        print ("Flow TopN Search Payload to POST: ")
        print json.dumps(CURRENT_POST_TOPN_PAYLOAD, indent = 4)
        print ("\n\n")
    # Create a new RestClient connection for API communication
    rc = RestClient(API_ENDPOINT, credentials_file=API_CREDENTIALS, verify=False)
    # Disable warnings
    requests.packages.urllib3.disable_warnings()
    # Post the CURRENT_POST_PAYLOAD to the API ENDPOINT
    resp = rc.post(CURRENT_POST_ENDPOINT,json_body=json.dumps(CURRENT_POST_PAYLOAD))
    # Check for valid response
    if resp.status_code == 200:
        results = resp.json()
        #print results
    else:
        print "Unsuccessful request returned code: {} , response: {}".format(resp.status_code,resp.text)
        return
    flow_list = []
    # Loop through the results, finding the specific flows returned
    for entry in results["results"]:
        flow_list.append((entry['src_hostname'], entry['src_address'],
                          entry['src_port'], entry['dst_port'],
                          entry['dst_address'],entry['dst_hostname'],
                          entry['proto'], entry['fwd_pkts'],
                          entry['rev_pkts'], entry['vrf_name']))
    # Make use of tabulate, to assist with auto-formating a table for print with the collected data
    print (tabulate(flow_list, headers = ["Src Hostname", "Src Address", "Src Port", "Dst Port", "Dst Address", "Dst Hostname", "Protocol", "Fwd Pkts", "Rev Pkts", "VRF"], tablefmt="orgtbl"))
    print "\n"
    summary = "Total Flows: " + str(len(flow_list))  # Should match the CLI --limit or 1000 (as the max inbuilt value)
    print "=" * len(summary)
    print summary
    print "=" * len(summary) + "\n"
    # Do similar to above, flowsearch, this time identifying the Top N hosts in the communication
    topN_list = []
    prov_count = 1
    # New Remote Connection to query the TopN API, using the CURRENT_POST_TOPN_PAYLOAD
    resp = rc.post(CURRENT_POST_TOPN_ENDPOINT,json_body=json.dumps(CURRENT_POST_TOPN_PAYLOAD))
    # Check for valid response (200 - ok)
    if resp.status_code == 200:
        results = resp.json()
        # Loop through the results returned
        for entry in results[0]['result']:
            # Perform a DNS lookup, using the local system's DNS providers -
            # i.e. the host running this script
            dns_name = my_nslookup(entry['dst_address'])
            # For each entry, append to the list
            # #1, IP Address, DNS lookup, "" = empty for Source Address, Packet Count
            topN_list.append((prov_count, entry['dst_address'], dns_name, "",
                              "Total: " + str(entry['fwd_pkts'])))
            # Perform a second query, this time matching all flows for
            # the port number specified AND the current provider 'dst_address'
            # Do this by calling my function, to return the JSON paylod required
            subsequent_query = topN_query(t0, t1, entry['dst_address'], args.port)
            sub_resp = rc.post(CURRENT_POST_TOPN_ENDPOINT,json_body=json.dumps(subsequent_query))
            # Again, check for valid response
            if sub_resp.status_code == 200:
                cons_results = sub_resp.json()
                provider_seen_before = False
                for cons_entry in cons_results[0]['result']:
                    if not provider_seen_before:
                        # print entry = Provider IP Address, Source IP Address, Packet Count
                        topN_list.append(("", "", "", cons_entry['src_address'], cons_entry['fwd_pkts']))
                        provider_seen_before = True
                    else:
                        # print entry = Provider empty as in the top line of this section, Source IP Address, Packet Count
                        topN_list.append(("", "", "", cons_entry['src_address'], cons_entry['fwd_pkts']))
            prov_count += 1
            # Add a blank line between each Destination Provider for clarity - since all in the same topN_list
            separator = "-" * 15
            topN_list.append((separator, separator, separator, separator, separator))
    else:
        print "Unsuccessful request returned code: {} , response: {}".format(resp.status_code,resp.text)
        return
    # Print out the summary tables for the TopN Destination Providers and consuming Source Addresses per Provider
    print ("TopN Table : Summary for Source Addresses consuming the Destination Provider on Port: %s") % (str(args.port) + "\n")
    print (tabulate(topN_list, headers = ["#", "Top N\nProvider", "DNS Lookup", "TopN\nSource Address\nPer Provider", "Packets"], tablefmt="orgtbl", numalign="center", stralign="center"))
    print "\n"
# NOTE(review): fragment — the expression building `filters` (per-instance
# eq-ip clauses mapped over app_tier instances) opens above this view;
# tet_client, req_number, MORPHEUS_SCOPE_ID and app_tiers come from earlier.
                'type': 'eq',
                'field': 'ip',
                'value': e
            }, app_tier.get('instances')))
# One child scope per application tier, named "<tier>_<request-number>",
# parented under the Morpheus scope.
req_payload = {
    'short_name': f"{app_tier.get('name')}_{req_number}",
    'description': 'Provisioned By Morpheus',
    'short_query': {
        'type': 'or',
        'filters': filters
    },
    'parent_app_scope_id': MORPHEUS_SCOPE_ID
}
resp = tet_client.post('/app_scopes', json_body=json.dumps(req_payload))
# print(f"INFO: Tetration Scope Created: {resp.text}")
print(
    f"INFO: Tetration Scope Created: {app_tier.get('name')}_{req_number}")

#########################################################
#########################################################
# Create Tetration Apps #################################
#########################################################
print('INFO: Creating Tetration Applications')
for app_tier in app_tiers:
    # Re-read the scope list on every iteration to pick up scopes created above.
    scopes = json.loads(tet_client.get('/openapi/v1/app_scopes/').text)
# NOTE(review): fragment — the loop this `break` exits, plus rc, the SY/SM/SD/SH
# and EY/EM/ED/EH date parts, category and application_id, are defined above
# this view.
        break
print()
# Live-analysis query: a single-category filter over the user-selected
# +0900 (JST) time window, capped at 10 flows.
req_payload = {
    "t0": "{}-{}-{}T{}:00:00+0900".format(SY, SM, SD, SH),
    "t1": "{}-{}-{}T{}:00:00+0900".format(EY, EM, ED, EH),
    "limit": 10,
    "filter": {
        "type": "eq",
        "field": "category",
        "value": category
    },
}
resp = rc.post('/live_analysis/%s' % application_id,
               json_body=json.dumps(req_payload))
ana = resp.json()
# Widen pandas display limits so the flow table prints without truncation.
pd.set_option('display.max_columns', 1000)
pd.set_option('display.max_rows', 2000)
pd.set_option('display.max_colwidth', 2000)
pd.options.display.width=None
if len(ana) == 0:
    # User-facing Japanese message: "there are no matching flows".
    print('該当するフローはありません')
    print()
    sys.exit()
offset = ana.get('Offset')
Results = ana['Results']
# Common headers for all Tetration OpenAPI calls.
tet_headers = {
    'Content-Type': "application/json",
    'Accept': "application/json"
}

# PERF FIX: create the REST client once, before the CSV loop — the original
# re-instantiated RestClient for every row posted.
restclient = RestClient(
    api_ep,
    headers=tet_headers,
    credentials_file='/home/xxxx/cisco_tet/api_credentials.json',
    verify=False)

# Each CSV row describes one scope: a short_name, a subnet value (in the
# "field" column), and the parent scope id.
with open('tet-scopecsvpython.csv') as csvfile:
    read = csv.DictReader(csvfile)
    for row in read:
        #print(row['field'], row['parent_app_scope_id'], row['short_name'])
        jdata_scope = {
            "short_name": str(row['short_name']),
            "short_query": {
                "field": "ip",
                "type": "subnet",
                "value": str(row['field'])
            },
            "parent_app_scope_id": str(row['parent_app_scope_id'])
        }
        jdata_scope_out = json.dumps(jdata_scope, indent=True)
        payloadone = jdata_scope_out
        # print(payloadone)
        resp = restclient.post('/app_scopes', json_body=payloadone)
        print(resp.text)
# NOTE(review): fragment — rc, application_id, cluster_id, scope_id, port and
# proto are defined above this view; the POST that consumes the final
# req_payload continues beyond it.
# Read the application's policies to learn the current default-policy version.
resp = rc.get('/applications/%s/policies' % application_id)
policy = resp.json()
d_pol = policy['default_policies']
ver = d_pol[0]['version']
# Create an ALLOW policy (priority 90) from the cluster (consumer) to the
# scope filter (provider) at that version.
req_payload = {
    "version": ver,
    "rank" : "DEFAULT",
    "policy_action" : "ALLOW",
    "priority" : 90,
    "consumer_filter_id" : cluster_id,
    "provider_filter_id" : scope_id,
}
rc.post('/applications/%s/policies' % application_id,
        json_body=json.dumps(req_payload))
# Re-read the policies to find the id of the policy just created, matching on
# the consumer/provider filter pair.
resp = rc.get('/applications/%s/policies' % application_id)
policy= resp.json()
d_pol = policy['default_policies']
policy_id = [rx['id'] for rx in d_pol
             if (rx['consumer_filter_id'] == cluster_id
                 and rx['provider_filter_id'] == scope_id)]
# NOTE(review): pop() on an empty list raises IndexError when no match exists.
policy_id = policy_id.pop()
# Payload for attaching an L4 (single port + protocol) parameter to that policy.
req_payload = {
    "version": ver,
    "start_port" : port,
    "end_port" : port,
    "proto" : proto,
}
#!/usr/bin/env python ### use Tetration API to search flows from tetpyclient import RestClient import json import csv flows={} api_ep="https://tetcluster.xxx.xxx" tet_headers = { 'Content-Type': "application/json", 'Accept': "application/json" } jdata_fsearch = {"t0":"2019-10-01T00:00:00-0600","t1":"2019-10-01T17:13:00-0600","filter":{ "filters":[{"type":"subnet","field":"src_addr","value":"xxx.xxx.xxx.x/xx"},{"type":"in","field":"dst_port","values":["80","443"]}]},"scopeName":"Default:test:test:test:test","limit":1000} jdata_fsearch_out = json.dumps(jdata_fsearch, indent=True) payloadone = jdata_fsearch_out #print(payloadone) restclient = RestClient(api_ep, headers=tet_headers, credentials_file='/home/xxxx/cisco_tet/api_credentials.json', verify=False) resp = restclient.post('/flowsearch', json_body=payloadone) jdata_load = json.loads(resp.text) #jdata_flows_out = json.dumps(jdata_load, sort_keys=True, indent=True) #print(jdata_flows_out) for flow in jdata_load['results']: print 'src address:',flow['src_address'], 'dst address:',flow['dst_address'],'Protocol:', flow['proto'], 'dst_port:', flow['dst_port']
def scope_action(hostname, api_key, api_secret, scope_name, filters, parent_scope_name, commit, validate_certs):
    """Create or update a Tetration app scope and optionally commit the change.

    Returns an (rc, payload) tuple: (0, result-dict) on success or no-op,
    (1, error-string) when an API call fails.

    NOTE(review): if parent_scope_name does not match any existing scope,
    parent_scope_id is never bound and the payload build below raises
    NameError — left as-is in this documentation pass.
    """
    result = {"ansible_facts": {}}
    api_endpoint = 'https://{0}'.format(hostname)
    restclient = RestClient(api_endpoint,
                            api_key=api_key,
                            api_secret=api_secret,
                            verify=validate_certs)
    # Getting the scopes to do parent_id lookup and check if scope already exists
    resp = restclient.get('/openapi/v1/app_scopes')
    if not resp.status_code == 200:
        return (
            1,
            "Error {0}: {1} during connection attempt to {2}/openapi/v1/app_scopes. \n"
            .format(resp.status_code, resp.reason, api_endpoint))
    cluster_scopes = json.loads(resp.content)
    # De-capitalize and hyphenate any field keywords in the filters
    for i, filter in enumerate(filters):
        if filter['field'] == 'Address Type':
            newfield = filter['field'].lower().replace(' ', '_')
            filters[i]['field'] = newfield
        if filter['field'] == 'Address':
            # 'Address' filters become subnet matches on the 'ip' field.
            filters[i]['field'] = 'ip'
            filters[i]['type'] = 'subnet'
    # Check if scope exists, and filter is the same
    # NOTE(review): assumes the existing scope's query has at least two
    # sub-filter groups (index [1]) — confirm against the API response shape.
    for scope in cluster_scopes:
        if scope_name == scope['short_name'] and scope['query']['filters'][1][
                'filters'] == filters:
            result['changed'] = False
            return (0, result)
    #Parent ID lookup
    cluster_scope_dict = {x['short_name']: x['id'] for x in cluster_scopes}
    for cluster_scope in cluster_scope_dict.keys():
        if cluster_scope == parent_scope_name:
            parent_scope_id = cluster_scope_dict[cluster_scope]
    #Build the json payload
    payload = json.dumps(
        dict(short_name=scope_name,
             short_query=dict(type='and', filters=filters),
             parent_app_scope_id=parent_scope_id))
    #If scope already present on the cluster do a PUT, otherwise POST
    if scope_name in cluster_scope_dict:
        resp = restclient.put('/openapi/v1/app_scopes/{0}'.format(
            cluster_scope_dict[scope_name]),
                              json_body=payload)
    else:
        resp = restclient.post('/openapi/v1/app_scopes', json_body=payload)
    if not resp.status_code == 200:
        return (1, "Error {0}: {1} posting scope to the cluster. \n{2}".format(
            resp.status_code, resp.reason, resp.json()))
    #If commit flag set, commit the change at the parent scope
    if commit:
        resp = restclient.post(
            '/openapi/v1/app_scopes/commit_dirty?root_app_scope_id={0}'.format(
                parent_scope_id),
            json_body=json.dumps({'sync': True}))
        if not resp.status_code == 200:
            return (
                1,
                "Error {0}: {1} committing scope query change. \n{2}".format(
                    resp.status_code, resp.reason, resp.json()))
        # NOTE(review): placement reconstructed from collapsed source — the
        # commit response becomes the returned ansible_facts.
        result["ansible_facts"] = resp.json()
    result['changed'] = True
    return (0, result)
# CLI: take a tag (key=value) and an IP address, and attach the tag to the IP
# as a Tetration user annotation under ROOT_SCOPE.
parser.add_argument("-t", "--tag",
                    dest="tag",
                    help="Tag to be added / modified. Ex: quarantine=yes",
                    required=True)
parser.add_argument("-i", "--ip",
                    dest="ip",
                    help="Add tag to this IP. Ex: 10.60.7.71",
                    required=True)
args = parser.parse_args()
ip_value = args.ip
# Split "key=value" into the annotation field and its value.
tag_field = args.tag.split('=')[0]
tag_value = args.tag.split('=')[1]
requests.packages.urllib3.disable_warnings()
rc = RestClient(CLUSTER_URL, credentials_file=CRED_FILE, verify=False)
req_payload = {'ip': ip_value, 'attributes': {tag_field: tag_value}}
resp = rc.post('/inventory/tags/' + ROOT_SCOPE,
               json_body=json.dumps(req_payload))
# Checking result
if resp.status_code == 200:
    print("Done!")
else:
    # BUG FIX: resp.content is bytes on Python 3; concatenating it to a str
    # raised TypeError. resp.text is the decoded string body.
    print("Error: HTTP status code is " + str(resp.status_code) +
          " and message is " + resp.text)
from tetpyclient import RestClient
import json
import requests.packages.urllib3
from datetime import datetime, timedelta

CLUSTER_URL = "https://andromeda-aus.cisco.com"
CRED_FILE = "./cred.json"

### Define SCOPE_NAME, start, and end variables here ###

requests.packages.urllib3.disable_warnings()
rc = RestClient(CLUSTER_URL, credentials_file=CRED_FILE, verify=False)
# Query template: records between `start` and `end` (formatted as UTC-offset
# timestamps), 8 results max, no filter, scoped to SCOPE_NAME. "<ENDPOINT>"
# is a placeholder the user must replace with a real API path.
req_payload = {
    "t0": start.strftime("%Y-%m-%dT%H:%M:%S-0000"),
    "t1": end.strftime("%Y-%m-%dT%H:%M:%S-0000"),
    "limit": 8,
    "filter": {},
    "scopeName": SCOPE_NAME
}
resp = rc.post("<ENDPOINT>", json_body=json.dumps(req_payload))
if resp.status_code == 200:
    print(json.dumps(json.loads(resp.content), indent=4, sort_keys=True))
else:
    # BUG FIX: resp.content is bytes on Python 3; concatenating it to a str
    # raised TypeError. resp.text is the decoded string body.
    print("Error: HTTP status code is " + str(resp.status_code) +
          " and message is " + resp.text)
# Build the API client from the stored credentials file.
restclient = RestClient(API_ENDPOINT, credentials_file='credentials.json', verify=False)

# Fetch every application scope defined on the cluster.
appScopes = restclient.get('/app_scopes').json()

# Walk the scopes whose display name contains the search string; for each one,
# count its inventory with a match-anything subnet query, and delete the scope
# when the count is zero.
for scope in appScopes:
    if searchString not in scope["name"]:
        continue
    count_query = {
        "scopeName": scope["name"],  # optional
        "filter": {
            "type": "and",
            "filters": [
                {
                    "type": "subnet",
                    "field": "ip",
                    "value": "0.0.0.0/0"
                },
            ]
        }
    }
    scopeInv = restclient.post('/inventory/count',
                               json_body=json.dumps(count_query)).json()
    # An empty scope gets removed, and the deletion is reported.
    if scopeInv["count"] == 0:
        restclient.delete('/app_scopes/' + scope["id"])
        print(scope["name"] + ' ID: ' + scope["id"] + ' Count:' +
              str(scopeInv["count"]) + ' Deleted')
#Tetration IP address
API_ENDPOINT="https://x.x.x.x"
csvName="apps.csv"
restclient = RestClient(API_ENDPOINT, credentials_file='credentials.json', verify=False)

#Scope ID's that are used most Frequently, the ID can be found in the GUI by clicking the scope and viewing the ID in the URI
defaultScope = "5b058fbe755f023c58c5a256"
testScope = ""

#Open CSV file
with open(csvName, 'r') as read_obj:
    csv_reader = reader(read_obj)
    # BUG FIX: bare next() raised StopIteration on an empty file; the default
    # makes `header` None there, which the emptiness check below expects.
    header = next(csv_reader, None)
    # Check file as empty (idiom fix: identity comparison with None)
    if header is not None:
        # Iterate over each row after the header in the csv
        # Creates Scope with name of 2 column in each row, and the query value in the first column of each row, Parent Scope ID must aslo be set.
        # Parent Scope ID can be found in the GUI by clicking the parent scope, and then the ID is in the URI
        for row in csv_reader:
            req_payload = {
                "short_name": row[1],
                "short_query": {
                    "type":"contains",
                    "field":"host_name",
                    "value": row[0]
                },
                "parent_app_scope_id": defaultScope
            }
            resp = restclient.post('/app_scopes', json_body=json.dumps(req_payload))
            #print(resp.text)  ##If Needed##
            #print(row[0],row[1])  ##Used to Print Test Rows to verify before pushing##
# NOTE(review): fragment — the opening of the first req_payload (the if-branch
# without an offset) is above this view; rc, SCOPE, timestamp and offset are
# defined earlier.
        "filter": {},
        "scopeName" : SCOPE,
        "metrics" : ['fwd_tcp_bottleneck', 'rev_tcp_bottleneck','fwd_rst_count','rev_rst_count','fwd_syn_count','fwd_tcp_pkts_retransmitted','rev_tcp_pkts_retransmitted','fwd_pkts','rev_pkts','fwd_tcp_handshake_usec','srtt_usec','server_app_latency_usec','total_network_latency_usec'],
    }
else:
    # Same query with an explicit pagination offset; the window is exactly
    # one minute: t1 = timestamp + 1 minute, reformatted to the same pattern.
    req_payload = {
        "t0": timestamp,
        "t1": ""+(datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:00.000Z') + timedelta(minutes=1)).strftime("%Y-%m-%dT%H:%M:00.000Z")+"",
        "filter": {},
        "scopeName" : SCOPE,
        "metrics" : ['fwd_tcp_bottleneck', 'rev_tcp_bottleneck','fwd_rst_count','rev_rst_count','fwd_syn_count','fwd_tcp_pkts_retransmitted','rev_tcp_pkts_retransmitted','fwd_pkts','rev_pkts','fwd_tcp_handshake_usec','srtt_usec','server_app_latency_usec','total_network_latency_usec'],
        "offset" : offset
    }
#print(req_payload)
try:
    resp = rc.post('/flowsearch', json_body=json.dumps(req_payload))
except:
    # NOTE(review): bare except + raw_input (Python 2) — swallows every error
    # and pauses for the user, but nothing here actually retries the call.
    raw_input("Connection to Tetration Cluster API faild. Please ensure connectivity, check credentials and hit enter/return to try again.")
#print("tetration API response to console for debug")
#print( " Tetration API response: " + str(resp))
if resp.status_code == 200:
    #print TA json response for debug
    #print(resp.content)
    result = json.loads(resp.content.decode("utf-8"))
    # A null 'results' field means the window produced no flows at all.
    if(result['results'] == None):
        print("No result")
        sys.exit(1)
# NOTE(review): fragment — the enclosing search_query dict (its outer filter
# wrapper and first clause) opens above this view; client comes from earlier.
    "type": "subnet",
    "field": "src_address",
    "value": "172.18.17.0/24"
}, {
    "type": "contains",
    "field": "dst_hostname",
    "value": "oc"
}]
}
}
# display query for debug - uncomment next line
# pprint(search_query)
# send query to get flows
flows = client.post('/flowsearch', json_body=json.dumps(search_query))
# check http return code = looking for a 200
# 400 family is a malformed request - check JSON
print("HTTP return code: %s " % (flows.status_code))
# add some error handling to check processing was ok
try:
    # Convert Text to json
    json_output = json.loads(flows.text)
    # print Json in format output
    pprint(json_output)
except:
    # NOTE(review): bare except hides the real decode error; narrowing to
    # ValueError/JSONDecodeError would preserve behavior while surfacing bugs.
    print(" ** Issue with request \n ** No output returned ")
# NOTE(review): API_SECRET, API_KEY, CURRENT_GET_ENDPOINT and scope_name are
# defined above this view.
API_ENDPOINT = 'https://tetbdc.myco.int/openapi/v1'
rc = RestClient(API_ENDPOINT, api_secret=API_SECRET, api_key=API_KEY, verify=False)
resp = rc.get(CURRENT_GET_ENDPOINT)
# make sure we get a good response from the request
if resp.status_code != 200:
    print("Unsuccessful request returned code: {} , \
response: {}".format(resp.status_code,resp.text))
    # BUG FIX: the script used to fall through and call resp.json() on the
    # failed response anyway; stop here on a bad request.
    raise SystemExit(1)
results = resp.json()
# Index scopes by name so the requested parent scope can be looked up.
sc_checklist = {}
for check in results:
    sc_checklist[check['name']] = {
        "name":check['name'],
        "id":check['id'],
        "dirty":check['dirty'],
        "root":check['root_app_scope_id']
    }
parent_scope = sc_checklist[scope_name]
parent_scope_id = parent_scope["id"]
root_scope_id = parent_scope["root"]
print('parent: ' + parent_scope_id)
print('root: ' + root_scope_id)
# This is the dirty part - hehehe
# Commit all "dirty" (pending) scope-query changes under the root scope.
resp = rc.post("/app_scopes/commit_dirty?root_app_scope_id=" + root_scope_id)
results = resp.json()
print(results)
class Tetration_Helper(object):
    """Wrapper around the Tetration REST API used by an Infoblox-style connector.

    Reports progress and errors through the injected `pigeon` messenger object
    and keeps /inventory/search pagination state on a nested Inventory holder.
    NOTE(review): reconstructed from collapsed source — statement grouping
    inside a few methods is inferred and marked where ambiguous.
    """

    class Inventory(object):
        # Pagination state for /inventory/search: the server's offset token,
        # the most recently fetched page, and whether another page exists.
        offset = ''
        pagedData = None
        hasNext = False

    def __init__(self, endpoint, api_key, api_secret, pigeon, options, tenant_app_scope="Default"):
        # NOTE(review): TLS verification is disabled for all API calls.
        self.rc = RestClient(endpoint,
                             api_key=api_key,
                             api_secret=api_secret,
                             verify=False)
        self.scopes = []
        self.pigeon = pigeon          # status/message sink (project type)
        self.inventory = self.Inventory()
        self.filters = {}
        self.options = options        # expects at least options["limit"]
        self.subnets = []
        self.boolean = Boolean_Helper()
        self.tenant_app_scope = tenant_app_scope

    def GetSearchDimensions(self):
        """Return the queryable inventory dimensions reported by the cluster."""
        resp = self.rc.get('/inventory/search/dimensions')
        return resp.json()

    def GetApplicationScopes(self):
        """Load all app scopes into self.scopes; report and exit(0) on failure."""
        resp = self.rc.get('/app_scopes')
        if resp.status_code != 200:
            self.pigeon.status_code = '403'
            self.pigeon.note.update({
                'status_code': 403,
                'message': 'Unable to get application scopes from tetration cluster',
                'data': {}
            })
            self.pigeon.send()
            exit(0)
        else:
            self.scopes = resp.json()

    def GetTenantNames(self):
        """Return the cluster's VRFs; report and exit(0) on failure."""
        resp = self.rc.get('/vrfs')
        if resp.status_code != 200:
            self.pigeon.status_code = '403'
            # NOTE(review): the message text says "application scopes" but
            # this call fetches VRFs — kept byte-identical here.
            self.pigeon.note.update({
                'status_code': 403,
                'message': 'Unable to get application scopes from tetration cluster',
                'data': {}
            })
            self.pigeon.send()
            exit(0)
        else:
            return resp.json()

    def GetInventory(self, filters=None, dimensions=None):
        """Fetch one page of /inventory/search results into self.inventory."""
        req_payload = {
            "filter": {
                "type": "or",
                "filters": filters
            },
            "scopeName": self.tenant_app_scope,
            "dimensions": dimensions,
            "limit": self.options["limit"],
            # Resume from the stored offset token when paging.
            "offset": self.inventory.offset if self.inventory else ""
        }
        resp = self.rc.post('/inventory/search',
                            json_body=json.dumps(req_payload))
        if resp.status_code != 200:
            self.pigeon.note.update({
                'status_code': 403,
                'message': 'Unable to get inventory from tetration cluster',
                'data': {}
            })
            self.pigeon.send()
            exit(0)
        else:
            self.pigeon.note.update({
                'status_code': 100,
                'message': 'Successfully retrieved inventory page from Tetration',
                'data': {}
            })
            self.pigeon.send()
            resp = resp.json()
            self.inventory.pagedData = resp['results']
            # No 'offset' key in the response means this was the last page.
            self.inventory.offset = resp['offset'] if 'offset' in resp else ''
            self.inventory.hasNext = True if self.inventory.offset else False
        return

    def CreateInventoryFilters(self, network_list):
        """Build one inventory filter per unique 'comment' value in network_list."""
        inventoryDict = {}
        appScopeId = json.loads(os.environ['FILTERS_APP_SCOPE_ID'])[0]["value"]
        for row in network_list:
            if row['comment'] not in inventoryDict:
                inventoryDict[row['comment']] = {}
                inventoryDict[row['comment']]['app_scope_id'] = appScopeId
                inventoryDict[row['comment']]['name'] = row['comment']
                inventoryDict[row['comment']][
                    'primary'] = "TRUE" if self.boolean.GetBoolean(
                        os.getenv('SCOPE_RESTRICTED', default=False)) else "FALSE"
                inventoryDict[row['comment']]['query'] = {
                    "type": "or",
                    "filters": []
                }
            # NOTE(review): placement reconstructed — each row's subnet is
            # appended to its comment group's OR-filter list.
            inventoryDict[row['comment']]['query']['filters'].append({
                "type": "subnet",
                "field": "ip",
                "value": row['network']
            })
        self.filters = inventoryDict
        return

    def PushInventoryFilters(self):
        """POST each built filter; stop and report on the first failure."""
        for inventoryFilter in self.filters:
            req_payload = self.filters[inventoryFilter]
            resp = self.rc.post('/filters/inventories',
                                json_body=json.dumps(req_payload))
            if resp.status_code != 200:
                self.pigeon.note.update({
                    'status_code': 403,
                    'message': 'Error pushing inventory filters to tetration cluster',
                    'data': {}
                })
                self.pigeon.send()
                return
        self.pigeon.note.update({
            'status_code': 100,
            'message': 'Successfully posted inventory filters to Tetration cluster',
            'data': {}
        })
        self.pigeon.send()
        return

    def AnnotateHosts(self, hosts, columns, csvFile):
        """Write host annotations to csvFile and upload them to Tetration.

        NOTE(review): the file is opened "wb" and fed to csv.DictWriter —
        Python 2 era code; on Python 3 this combination would fail.
        """
        with open(csvFile, "wb") as csv_file:
            # Header: IP plus one column per annotation, expanding extattrs
            # attribute lists into "<name>-<attr>" columns unless overloaded.
            fieldnames = ['IP']
            for column in columns:
                if column["infobloxName"] != 'extattrs':
                    fieldnames.extend([column["annotationName"]])
                else:
                    if column["overload"] == "on":
                        fieldnames.extend([column["annotationName"]])
                    else:
                        for attr in column["attrList"]:
                            fieldnames.extend([
                                str(column["annotationName"]) + '-' + str(attr)
                            ])
            writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
            writer.writeheader()
            for host in hosts:
                hostDict = {}
                hostDict["IP"] = host["ip_address"]
                # hostDict["VRF"] = [ tetHost["vrf_name"] for tetHost in self.inventory.pagedData if tetHost["ip"] == host["ip_address"] ][0]
                # Hosts without any DNS names are skipped entirely.
                if len(host["names"]) < 1:
                    continue
                for column in columns:
                    if column["infobloxName"] == 'extattrs':
                        for attr in column["attrList"]:
                            if column["overload"] == "on":
                                # Overload mode: accumulate "attr=value;" pairs
                                # into a single annotation column.
                                if attr["value"] in host["extattrs"]:
                                    hostDict[column["annotationName"]] = str(
                                        attr["value"]
                                    ) + '=' + str(
                                        host["extattrs"][
                                            attr["value"]]["value"]
                                    ) + ';' if column[
                                        "annotationName"] not in hostDict.keys(
                                        ) else hostDict[
                                            column["annotationName"]] + str(
                                                attr["value"]
                                            ) + '=' + str(host["extattrs"][
                                                attr["value"]]["value"]) + ';'
                                else:
                                    hostDict[column["annotationName"]] = str(
                                        attr["value"]
                                    ) + '=;' if column[
                                        "annotationName"] not in hostDict.keys(
                                        ) else str(hostDict[
                                            column["annotationName"]]) + str(
                                                attr["value"]) + '=;'
                            else:
                                # One dedicated column per attribute.
                                if attr["value"] in host["extattrs"]:
                                    hostDict[column["annotationName"] + '-' +
                                             attr["value"]] = host["extattrs"][
                                                 attr["value"]]["value"]
                                else:
                                    hostDict[column["annotationName"] + '-' +
                                             attr["value"]] = ''
                    elif column["infobloxName"] == 'zone':
                        # Zone = DNS name(s) with the leftmost label removed.
                        hostDict[column["annotationName"]] = '.'.join(",".join(
                            host["names"]).split('.')[1:])
                    elif column["infobloxName"] == 'names':
                        # Hostname = leftmost label of the joined name list.
                        hostDict[column["annotationName"]] = ",".join(
                            host[column["infobloxName"]]).split('.')[0]
                    else:
                        hostDict[column["annotationName"]] = host[
                            column["infobloxName"]]
                writer.writerow(hostDict)
        #keys = ['IP', 'VRF']
        #req_payload = [tetpyclient.MultiPartOption(key='X-Tetration-Key', val=keys), tetpyclient.MultiPartOption(key='X-Tetration-Oper', val='add')]
        #resp = self.rc.upload(csvFile, '/assets/cmdb/upload', req_payload)
        # Upload the CSV as additive annotations for this tenant scope.
        req_payload = [
            tetpyclient.MultiPartOption(key='X-Tetration-Oper', val='add')
        ]
        resp = self.rc.upload(csvFile,
                              '/assets/cmdb/upload/' + self.tenant_app_scope,
                              req_payload)
        if resp.status_code != 200:
            self.pigeon.note.update({
                'status_code': 403,
                'message': 'Error posting annotations to Tetration cluster',
                'data': {}
            })
            self.pigeon.send()
            return
        else:
            self.pigeon.note.update({
                'status_code': 100,
                'message': 'Successfully posted annotations to Tetration cluster',
                'data': {}
            })
            self.pigeon.send()

    def AddSubnets(self, subnets):
        """Register CIDR strings consulted by HasSubnetFilterForIp."""
        for subnet in subnets:
            self.subnets.append(IPNetwork(subnet))

    def HasSubnetFilterForIp(self, ip):
        """Return True when ip is inside a registered subnet or is non-private."""
        for subnet in self.subnets:
            addr = IPAddress(ip)
            if subnet.__contains__(addr) is True or addr.is_private(
            ) is not True:
                return True
        return False