def index(request):
    # Manipulates POST to retrieve necessary information from Spark using GET
    webhook = json.loads(request.body)
    print(webhook['data']['id'])
    result = spark().sendSparkGET(
        'https://api.ciscospark.com/v1/messages/{0}'.format(
            webhook['data']['id']), bearer)
    result = json.loads(result)
    print(result)
    msg = ""
    if webhook['data']['personEmail'] != bot_email:
        in_message = result.get('text', '').lower()
        username = result.get('personEmail', '').lower()
        in_message = in_message.replace(bot_name, '')
        # Help function
        if '/help' in in_message:
            msg = (
                "You asked for help? Let me explain what I can do:\n\n"
                "I am APIC-EM Bot, and I can help you interact with your favorite Campus Network Controller via Spark! For instance:"
                "\n- Send '/devices' to see information about active network devices."
                "\n- Send '/hosts' to see information about attached hosts."
                "\n- Send '/vlans' to see information about active VLANs.\n\n"
                "Contact [email protected] for support, questions, or friendly feedback."
            )
        # Available query option: getVlans method from apic class
        elif '/vlans' in in_message:
            response = apic().getVlans(APIC, newTicket)
            string = 'Here are the active VLANs on your network:\n'
            msg = string + response
        # Available query option: getDevices method from apic class
        elif '/devices' in in_message:
            response = apic().getDevices(APIC, newTicket)
            string = 'Here are the active devices on your network:\n'
            msg = string + response
        # Available query option: getHosts method from apic class. Spark collapses
        # redundant spaces, so for the sake of proper table spacing, ' ' is used
        # to hard-code spacing.
        elif '/hosts' in in_message:
            response = apic().getHosts(APIC, newTicket)
            string = 'Here are the attached hosts on your network:'
            msg = string + response
        else:
            # Message returned in case APIC-EM Bot does not understand the query
            msg = "Uh oh! Looks like I didn't quite understand that. Try asking for '/help' to see what I can do!"
    # Posts bot response to Spark room
    if msg != "":
        print(msg)
        spark().sendSparkPOST("https://api.ciscospark.com/v1/messages", {
            "roomId": webhook['data']['roomId'],
            "markdown": msg
        }, bearer)
    return "true"
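The handler above only runs once Spark knows where to send its POSTs, which means a webhook must be registered against the bot's account pointing at the public ngrok URL. Below is a minimal sketch of that one-time registration using the standard Spark /v1/webhooks endpoint; SPARK_TOKEN and NGROK_URL are placeholders you would substitute with your own values.

import requests

# Assumed placeholders: substitute your own bot token and public ngrok URL
SPARK_TOKEN = '******'
NGROK_URL = 'https://<your-subdomain>.ngrok.io/'

# Register a webhook so Spark POSTs to the itty server whenever a
# message is created in a room the bot belongs to
response = requests.post(
    'https://api.ciscospark.com/v1/webhooks',
    headers={
        'Authorization': 'Bearer ' + SPARK_TOKEN,
        'Content-Type': 'application/json'
    },
    json={
        'name': 'APIC-EM Bot webhook',
        'targetUrl': NGROK_URL,
        'resource': 'messages',
        'event': 'created'
    }
)
print(response.json())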
""" import requests, json # Imports necessary standard packages from APIC import apic # Imports 'apic' class from 'APIC.py' for APIC-EM methods from SPARK import spark # Imports 'spark' class from 'SPARK.py' for Spark methods from itty import * # Imports itty, which facilitates ngrok communication # Global variables APIC = 'devnetapi.cisco.com/sandbox/apic_em' # APIC-EM IP address USERNAME = '******' # APIC-EM username PASSWORD = '******' # APIC-EM password # Invoke getTicket method to generate unique service ticket, # necessary for making any API calls to APIC-EM Controller newTicket = apic().getTicket(APIC, USERNAME, PASSWORD) # Main function @post( '/' ) # The code within 'index(request)' is executed whenever POST API is sent to ngrok server def index(request): # Manipulates POST to retreive necessary information from SPARK using GET webhook = json.loads(request.body) print(webhook['data']['id']) result = spark().sendSparkGET( 'https://api.ciscospark.com/v1/messages/{0}'.format( webhook['data']['id']), bearer) result = json.loads(result)
import requests, json  # Imports necessary standard packages
from APIC import apic  # Imports 'apic' class from 'APIC.py' for APIC-EM methods

requests.packages.urllib3.disable_warnings()  # Suppresses SSL certificate warnings

# Defines global variables
APIC = 'devnetapi.cisco.com/sandbox/apic_em'
USERNAME = '******'
PASSWORD = '******'

# This calls on the getTicket method to generate a unique service ticket,
# necessary for making any API calls to the APIC-EM
newTicket = apic().getTicket(APIC, USERNAME, PASSWORD)

# This will later live under a lengthy 'if' statement depending on what
# request the user has made in the body of their Spark message, but presently
# it's an example of an API call that returns the JSON response
msg = apic().getVlans(APIC, newTicket)
# print(msg)
# test = open('mydata.json').read()

# Convert JSON to Python dict
parsed = json.loads(msg)
apicList = parsed['response']
refined = apic().removeDup(apicList)

# Print the de-duplicated list of values
for value in refined:
    print(value)
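APIC.py is likewise not shown, but the usage above implies getTicket exchanges credentials for a service ticket via the APIC-EM ticket API, and removeDup strips duplicates from the 'response' list. Here is one plausible sketch of those two helpers under those assumptions; the author's actual implementation may differ.

import requests

class apic(object):
    # Sketch of two APIC.py helpers, inferred from their call sites above
    def getTicket(self, apic, username, password):
        # POSTs credentials to the APIC-EM ticket API and returns the
        # service ticket used to authorize subsequent calls
        url = 'https://{0}/api/v1/ticket'.format(apic)
        payload = {'username': username, 'password': password}
        response = requests.post(url, json=payload, verify=False)
        return response.json()['response']['serviceTicket']

    def removeDup(self, values):
        # Removes duplicates from a list while preserving order
        seen = set()
        refined = []
        for value in values:
            if value not in seen:
                seen.add(value)
                refined.append(value)
        return refined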