Example #1
from env import config
import meraki

dashboard = meraki.DashboardAPI(config['MERAKI_KEY'])


def get_organizations():
    response = dashboard.organizations.getOrganizations()
    return response


if __name__ == "__main__":
    for org in get_organizations():
        print(f"Organization ID {org['id']} has the name {org['name']}")
Example #2
    def __init__(self):
        self.dashboard = meraki.DashboardAPI(api_key=None,
                                             base_url='https://api.meraki.com/api/v1/',
                                             log_file_prefix=__file__[:-3],
                                             print_console=False)

        self.updStart()
        return
Example #3
from flask import (
    Flask,
    render_template,
    send_from_directory,
    send_file,
    request,
    jsonify,
    Response,
)

import os
import sys

import meraki

import diff
import util
import argparser

VERSION = '0.15.0'
dashboard = meraki.DashboardAPI()

def determine_path():
    """Borrowed from wxglade.py"""
    try:
        root = __file__
        if os.path.islink(root):
            root = os.path.realpath(root)
        return os.path.dirname(os.path.abspath(root))
    except:
        print("I'm sorry, but something is wrong.")
        print("There is no __file__ variable. Please contact the author.")
        sys.exit()


def is_hot_reload():
Example #4
__copyright__ = "Copyright (c) 2020 Cisco and/or its affiliates."
__license__ = "Cisco Sample Code License, Version 1.1"

from flask import render_template, flash, redirect, url_for, request
from app import app
import logging
import pprint
import meraki as mk
import TopDeskHelper

pp = pprint.PrettyPrinter(indent=4)
org_id = app.config['MERAKI_ORGANIZATION_ID']
user = app.config['TOPDESK_USERNAME']
password = app.config['TOPDESK_PASSWORD']
url = app.config['TOPDESK_URL']
dashboard = mk.DashboardAPI(app.config['MERAKI_API_KEY'])

logging.basicConfig(
    filename='app.log',
    filemode='a',
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')


# webhook handler
@app.route('/', methods=['POST'])
def alert():
    if not request.json:
        return ("invalid data", 400)
    logging.warning(request.json)
    print(request.json["alertType"])
    TopDeskHelper.create_incident(url, user, password, request.json)
    api_key = config["api_key"]
    orgName = config["orgName"]
    tag_prefix = 'viptela-11'
    org_id = None


# function to parse list of tags for an individual network
def strip_meraki_network_tags(meraki_network_tag):
    # parse out the network tag on the network that starts with the viptela- prefix
    meraki_tag_strip_part1 = re.findall(r'[v]+[i]+[p]+[t]+[e]+[l]+[a]+[-].*',\
         str(meraki_network_tag))
    return str(meraki_tag_strip_part1[0]).rstrip()
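
# For example (hypothetical tag string): strip_meraki_network_tags("prod viptela-11-site1")
# would return 'viptela-11-site1'.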


# obtain the org ID by matching on the org name
mdashboard = meraki.DashboardAPI(MerakiConfig.api_key)
result_org_id = mdashboard.organizations.getOrganizations()
for x in result_org_id:
    if x['name'] == MerakiConfig.org_name:
        MerakiConfig.org_id = x['id']


# defining function that creates dictionary of IPsec config
def get_meraki_ipsec_config(name, public_ip, lan_subnets, secret,
                            network_tags) -> dict:
    ipsec_config = {
        "name": name,
        "publicIp": public_ip,
        "privateSubnets": [lan_subnets],
        "secret": secret,
        "ikeVersion": "2",
def ingest_dashboard_data(accounts, log):
    append_log(log, "dashboard_monitor::ingest_dashboard_data::Accounts -",
               accounts)
    dt = make_aware(datetime.datetime.now())

    for sa in accounts:
        if not sa.sync_enabled:
            append_log(
                log,
                "dashboard_monitor::digest_database_data::sync session not set to allow sync;"
            )
            return

        a = sa.dashboard
        append_log(log, "dashboard_monitor::ingest_dashboard_data::Resync -",
                   a.description)
        dashboard = meraki.DashboardAPI(base_url=a.baseurl,
                                        api_key=a.apikey,
                                        print_console=False,
                                        output_log=False,
                                        caller=settings.CUSTOM_UA,
                                        suppress_logging=True)
        orgs = a.organization.all()
        if orgs:
            for org in orgs:
                org_id = org.orgid
                append_log(log, "dashboard_monitor::processing orgid::",
                           org_id)
                sgts = meraki_read_sgt(dashboard, org_id)
                sgacls = meraki_read_sgacl(dashboard, org_id)
                sgpolicies = meraki_read_sgpolicy(dashboard, org_id)
                append_log(
                    log, "dashboard_monitor::ingest_dashboard_data::SGTs - ",
                    len(sgts))
                append_log(
                    log, "dashboard_monitor::ingest_dashboard_data::SGACLs - ",
                    len(sgacls))
                append_log(
                    log,
                    "dashboard_monitor::ingest_dashboard_data::Policies - ",
                    len(sgpolicies))

                merge_sgts("meraki", sgts, not sa.ise_source, sa, log, org)
                merge_sgacls("meraki", sgacls, not sa.ise_source, sa, log, org)
                merge_sgpolicies("meraki", sgpolicies, not sa.ise_source, sa,
                                 log, org)

                clean_sgts("meraki", sgts, not sa.ise_source, sa, log, org)
                clean_sgacls("meraki", sgacls, not sa.ise_source, sa, log, org)
                clean_sgpolicies("meraki", sgpolicies, not sa.ise_source, sa,
                                 log, org)

                org.raw_data = json.dumps({
                    "groups": sgts,
                    "acls": sgacls,
                    "bindings": sgpolicies
                })
                org.force_rebuild = False
                org.last_sync = dt
                org.last_update = dt
                org.skip_sync = True
                org.save()
                sa.dashboard.last_sync = dt
                sa.dashboard.save()
        else:
            append_log(
                log,
                "dashboard_monitor::ingest_dashboard_data::No OrgId present")
Example #7
if __name__ == "__main__":
	# Importing variables
	api_key = credentials.api_key
	baseurl = credentials.base_url
	org_id = credentials.organization_id
	networks = credentials.networks
	cams = credentials.cams
	students = student_list.student_list
	webex_email = credentials.webex_email
	webex_token = credentials.webex_token

	# Instantiate Meraki Python SDK Client
	dashboard = meraki.DashboardAPI(
			api_key=api_key,
			base_url=baseurl,
			log_file_prefix='./logs/attendance/',
			print_console=False)

	# Instantiate AWS Python SDK Clients
	# Must configure AWS CLI for this to work
	# See https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-configure.html
	rekognition = boto3.client('rekognition', region_name='us-east-2')
	dynamodb = boto3.client('dynamodb', region_name='us-east-2')
	webex = WebexTeamsAPI(access_token=webex_token)

	# Record timestamp for snapshot name
	timestr = time.strftime("%Y%m%d-%H%M%S")

	file_name = snapshot.snap(
			dashboard_api_client=dashboard,
Example #8
import meraki # Import Meraki SDK
import firebase_admin # Import Firebase Python SDK
from firebase_admin import credentials
from firebase_admin import db
from credentials import home_api_key, home_network_id, mv_serial # Separate file with credentials

# Fetch the service account key JSON file contents
cred = credentials.Certificate('serviceAccountKey.json')
# Initialize the app with a service account, granting admin privileges
firebase_admin.initialize_app(cred, {
    'databaseURL': 'https://dashboard-cisco.firebaseio.com/'
})
# Authenticate Meraki 
dashboard = meraki.DashboardAPI(
    api_key = home_api_key,
    base_url = 'https://n255.meraki.com/api/v0'
)

# Get the snapshot from a specific timestamp
def get_snapshot_url(time):
    body = {'timestamp': time, 'fullframe': False}
    snapshot = dashboard.cameras.generateNetworkCameraSnapshot(home_network_id, mv_serial, **body)
    return snapshot['url']

if __name__ == "__main__":
    photo_url = get_snapshot_url('2020-05-11T06:19:34-07:00')
    # Upload to Firebase
    ref = db.reference('meraki/photoURL/')
    ref.set(photo_url)
Example #9
import requests
import meraki
import json
from private_vars import api_key, org_id

url = "https://api.meraki.com/api/v1"
dashboard = meraki.DashboardAPI(api_key)

payload = {}
headers = {
    "Content-Type": "application/json",
    "Accept": "application/json",
    "X-Cisco-Meraki-API-Key": api_key
}

response = requests.request("GET",
                            f"{url}/organizations",
                            headers=headers,
                            data=payload).json()

org_id_dict = dict()
network_id_dict = dict()

#Iterate over each item in the response and print it separately
for i in response:
    name = i['name']
    id = i['id']
    org_id_dict[name] = id
    # print(f"The Organization {name}'s ID is {id}")

# Write the org_id_dict dictionary to a JSON file
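# The example is cut off here; a minimal sketch of the announced step, assuming a
# hypothetical output filename of org_ids.json:
with open("org_ids.json", "w") as json_file:
    json.dump(org_id_dict, json_file, indent=2)
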
import meraki
import credentials

api_key = credentials.api_key
access_token = credentials.access_token

# initiating the Meraki dashboard object, providing the API key
dashboard = meraki.DashboardAPI(api_key=api_key,
                                print_console=False,
                                output_log=False)

my_orgs = dashboard.organizations.getOrganizations()

for org in my_orgs:
    print("Org name: ", org["name"], "Org ID: ", org["id"])

input_org_id = input("Enter Organization ID: ")

networks = dashboard.organizations.getOrganizationNetworks(
    organizationId=input_org_id)

for network in networks:
    group_policies = dashboard.networks.getNetworkGroupPolicies(
        networkId=network["id"])

    group_policy_id = ""

    # declare the policy name to be provisioned
    provision_policy = "SmartPhone Group Policy"

    # grabbing the policy ID of interest
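    # The example is truncated here; a minimal sketch of the lookup the comment above
    # describes, matching on the group policy name returned by getNetworkGroupPolicies:
    for policy in group_policies:
        if policy["name"] == provision_policy:
            group_policy_id = policy["groupPolicyId"]
            break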
Example #11
def main():
    # Check if all required parameters have been specified
    parser = argparse.ArgumentParser()
    api_key, org_id = parse_arguments(parser)

    if not (api_key and org_id):
        parser.exit(2, parser.print_help())

    # Make API calls to retrieve data
    dashboard = meraki.DashboardAPI(api_key)
    appliance_statuses = dashboard.appliance.getOrganizationApplianceUplinkStatuses(
        org_id, total_pages='all')
    device_statuses = dashboard.organizations.getOrganizationDevicesStatuses(
        org_id, total_pages='all')
    networks = dashboard.organizations.getOrganizationNetworks(
        org_id, total_pages='all')
    devices_by_serial = {d['serial']: d['name'] for d in device_statuses}
    networks_by_id = {n['id']: n['name'] for n in networks}

    # Output appliance statuses file
    output_file = 'appliance_statuses.csv'
    field_names = [
        'name', 'serial', 'network', 'networkId', 'lastReportedAt',
        'wan1_status', 'wan1_ip', 'wan1_gateway', 'wan1_publicIp',
        'wan1_primaryDns', 'wan1_secondaryDns', 'wan1_ipAssignedBy',
        'wan2_status', 'wan2_ip', 'wan2_gateway', 'wan2_publicIp',
        'wan2_primaryDns', 'wan2_secondaryDns', 'wan2_ipAssignedBy',
        'cellular_status', 'cellular_ip', 'cellular_provider',
        'cellular_publicIp', 'cellular_model', 'cellular_signalStat',
        'cellular_connectionType', 'cellular_apn'
    ]
    with open(output_file, mode='w', newline='\n') as fp:
        csv_writer = csv.DictWriter(fp,
                                    field_names,
                                    delimiter=',',
                                    quotechar='"',
                                    quoting=csv.QUOTE_ALL)
        csv_writer.writeheader()
        for status in appliance_statuses:
            status.update({
                'name': devices_by_serial[status['serial']],
                'network': networks_by_id[status['networkId']]
            })

            # Flatten objects/dictionaries, without requiring a third-party library
            interfaces = [uplink['interface'] for uplink in status['uplinks']]
            if 'wan1' in interfaces:
                wan1 = status['uplinks'][interfaces.index('wan1')]
                status.update({
                    'wan1_status': wan1['status'],
                    'wan1_ip': wan1['ip'],
                    'wan1_gateway': wan1['gateway'],
                    'wan1_publicIp': wan1['publicIp'],
                    'wan1_primaryDns': wan1['primaryDns'],
                    'wan1_secondaryDns': wan1['secondaryDns'],
                    'wan1_ipAssignedBy': wan1['ipAssignedBy']
                })
            if 'wan2' in interfaces:
                wan2 = status['uplinks'][interfaces.index('wan2')]
                status.update({
                    'wan2_status': wan2['status'],
                    'wan2_ip': wan2['ip'],
                    'wan2_gateway': wan2['gateway'],
                    'wan2_publicIp': wan2['publicIp'],
                    'wan2_primaryDns': wan2['primaryDns'],
                    'wan2_secondaryDns': wan2['secondaryDns'],
                    'wan2_ipAssignedBy': wan2['ipAssignedBy']
                })
            if 'cellular' in interfaces:
                cellular = status['uplinks'][interfaces.index('cellular')]
                status.update({
                    'cellular_status':
                    cellular['status'],
                    'cellular_ip':
                    cellular['ip'],
                    'cellular_provider':
                    cellular['provider'],
                    'cellular_publicIp':
                    cellular['publicIp'],
                    'cellular_model':
                    cellular['model'],
                    'cellular_signalStat':
                    cellular['signalStat'],
                    'cellular_connectionType':
                    cellular['connectionType'],
                    'cellular_apn':
                    cellular['apn']
                })
            status.pop('uplinks')
            csv_writer.writerow(status)

    # Output device statuses file
    output_file = 'device_statuses.csv'
    field_names = [
        'name', 'serial', 'network', 'networkId', 'mac', 'publicIp', 'status',
        'lastReportedAt', 'lanIp', 'gateway', 'ipType', 'primaryDns',
        'secondaryDns', 'usingCellularFailover', 'wan1Ip', 'wan1Gateway',
        'wan1IpType', 'wan1PrimaryDns', 'wan1SecondaryDns', 'wan2Ip',
        'wan2Gateway', 'wan2IpType', 'wan2PrimaryDns', 'wan2SecondaryDns'
    ]
    with open(output_file, mode='w', newline='\n') as fp:
        csv_writer = csv.DictWriter(fp,
                                    field_names,
                                    delimiter=',',
                                    quotechar='"',
                                    quoting=csv.QUOTE_ALL)
        csv_writer.writeheader()
        for status in device_statuses:
            status.update({'network': networks_by_id[status['networkId']]})
            csv_writer.writerow(status)
def main(argv):
    # Set default values for command line arguments
    api_key = org_id = arg_mode = None

    # Get command line arguments
    try:
        opts, args = getopt.getopt(argv, 'hk:o:')
    except getopt.GetoptError:
        print_help()
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print_help()
            sys.exit()
        elif opt == '-k':
            api_key = arg
        elif opt == '-o':
            org_id = arg

    # Check if all required parameters have been input
    if api_key is None or org_id is None:
        print_help()
        sys.exit(2)

    # Instantiate a Meraki dashboard API session
    dashboard = meraki.DashboardAPI(
        api_key,
        output_log=False
        #log_file_prefix=os.path.basename(__file__)[:-3],
        #log_path='',
        #print_console=False
    )

    # Get list of current networks in org
    networks = dashboard.organizations.getOrganizationNetworks(org_id)

    # Iterate through all networks and load client lists
    # initialize variables
    clientDetailsDst = []
    clientDetailsSrc = []
    for network in networks:
        #print(f"Evaluating network {network['id']} : {network['name']}")
        # Only process networks tagged copy_client_names_src or copy_client_names_dst
        if 'copy_client_names_src' in network['tags']:
            # Get client details for src network
            print(f"Matched source network {network['name']}")
            netIdSrc = network['id']
            clientDetailsSrc = dashboard.networks.getNetworkClients(
                network['id'], timespan=2678400, perPage=1000)
        elif 'copy_client_names_dst' in network['tags']:
            print(f"Matched dest network {network['name']}")
            netIdDst = network['id']
            clientDetailsDst = dashboard.networks.getNetworkClients(
                network['id'], timespan=2678400, perPage=1000)
        elif network['tags'] is None or 'copy_client_names' not in network[
                'tags']:
            continue

    # Iterate through each client in dst network and attempt to match from src
    #print(f'source client json: \n {clientDetailsSrc}')
    #print(f'destination client json: \n {clientDetailsDst}')
    for client in clientDetailsDst:
        print(f"Current Client: {client['mac']}, {client['description']}")
        matchedItem = next(
            (item
             for item in clientDetailsSrc if item["mac"] == client["mac"]),
            None)
        if matchedItem is None:
            continue
        else:
            toProvision = [{
                'mac': matchedItem['mac'],
                'name': matchedItem['description']
            }]
            dashboard.networks.provisionNetworkClients(netIdDst, toProvision,
                                                       'Normal')
Example #13
import meraki

dashboard = meraki.DashboardAPI(output_log=False)

# To do
# Move AP corp network after RWS usage is finished
# Move AP to room network when RWS usage starts

ssid_definition = {
    'authMode': 'psk',
    'bandSelection': 'Dual band operation',
    'enabled': False,
    'encryptionMode': 'wpa',
    'ipAssignmentMode': 'Bridge mode',
    'lanIsolationEnabled': False,
    'minBitrate': 11,
    'name': '',
    'perClientBandwidthLimitDown': 0,
    'perClientBandwidthLimitUp': 0,
    'psk': '',
    'splashPage': 'None',
    'ssidAdminAccessible': False,
    'useVlanTagging': False,
    'wpaEncryptionMode': 'WPA2 only'
}
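
# A minimal sketch of how ssid_definition might be applied; the network ID, SSID number,
# name and PSK below are hypothetical, and individual field values may need adjusting
# for the v1 wireless endpoint:
def apply_ssid_definition(network_id, number, name, psk):
    body = dict(ssid_definition, name=name, psk=psk)
    return dashboard.wireless.updateNetworkWirelessSsid(network_id, number, **body)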


def get_org_names(hotelid):
    orgs = dashboard.organizations.getOrganizations()
    return [org['id'] for org in orgs]
Example #14
#! Python3

from datetime import datetime as dt
from re import match
import meraki
import csv
import sys

if __name__ == "__main__":

    start_time = dt.now()

    try:
        m = meraki.DashboardAPI("7e1b8674f0cd64850602befdc8b4941bab1e28a7")
        organization_id = "630503947831870169"
        networks = m.organizations.getOrganizationNetworks(organization_id,
                                                           perPage=10000,
                                                           total_pages="all")
        input_file = "C:\\Users\\lamin\\OneDrive\\Documents\\Repos\\Meraki\\network_names_and_syslog_server_role.csv"

        search_list = {}
        for network in networks:
            search_list[network["name"]] = network["id"]

        with open(input_file, "r") as ifile:
            network_file = csv.reader(ifile, delimiter=',', quotechar='|')
            for line in network_file:
                network_name = line[0]
                server_role = line[1]
                network_id = search_list[network_name.rstrip()]
                syslog_servers = [{
Example #15
IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied.
"""
# This script retrieves a Meraki MX L3 Firewall rule from the file NewRuleToAdd.txt and adds it to all
# networks in the Org. The rule inserted only specifies the Destination IPs (destCidr field) and uses
# the comment specified in the first line of the input file for the "comment" field of the rule.
# All other parameters for the rule are specified in templateRuleDict below for all insertions.

import meraki
import time
import sys
import config
import requests


dashboard = meraki.DashboardAPI(api_key=config.meraki_api_key)

templateRuleDict = {
        "comment": "",
        "policy": "deny",
        "protocol": "any",
        "srcPort": "Any",
        "srcCidr": "Any",
        "destPort": "Any",
        "destCidr": "",
        "syslogEnabled": True
    }
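
# A minimal sketch of how a rule built from templateRuleDict might be inserted at the top
# of a network's L3 firewall rules; network_id, comment and dest_cidr are hypothetical
# arguments here, and the trailing default rule is stripped before re-submitting:
def insert_l3_rule(network_id, comment, dest_cidr):
    new_rule = dict(templateRuleDict, comment=comment, destCidr=dest_cidr)
    current = dashboard.appliance.getNetworkApplianceFirewallL3FirewallRules(network_id)
    rules = [r for r in current['rules'] if r.get('comment') != 'Default rule']
    return dashboard.appliance.updateNetworkApplianceFirewallL3FirewallRules(
        network_id, rules=[new_rule] + rules)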

templateRuleDictNoSyslog = {
        "comment": "",
        "policy": "deny",
# This script will prompt the user to select an organization, source switch,
# source port and destination switch and port to clone a port config, before
# confirming if the user would like to proceed with the change

# Configure your API key ENV variable using 'export MERAKI_DASHBOARD_API_KEY=YOUR_KEY'

# Usage: python port-clone.py

import meraki
import os
import re
import pprint

# Instantiate the Meraki Dashboard API using the Python SDK
dashboard = meraki.DashboardAPI(os.environ['MERAKI_DASHBOARD_API_KEY'])

# Get the list of organizations that the user has privileges on
orgs = dashboard.organizations.getOrganizations()

# Print the list of organization names
for i, org in enumerate(orgs):
    print (i, ":", org['name'])

# Prompt the user for which organization they'd like to configure
selected_org = int(input("Which organization would you like to select? "))

# Get the list of devices in the selected organization
org_devices = dashboard.organizations.getOrganizationDevices(
    orgs[selected_org]['id'], total_pages='all'
)
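
# The example is cut off here; a minimal sketch of the cloning step described in the
# comments above, assuming the source/destination serials and port IDs have already been
# chosen (any other read-only fields returned by the API may also need removing):
def clone_port(src_serial, src_port, dst_serial, dst_port):
    port_config = dashboard.switch.getDeviceSwitchPort(src_serial, src_port)
    port_config.pop('portId', None)  # portId is part of the URL path, not the update body
    return dashboard.switch.updateDeviceSwitchPort(dst_serial, dst_port, **port_config)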
import os
import sys
import json
import meraki
from wireless import Wireless
from appliance import Appliance
from switch import Switch
from totalNetwork import TotalNetwork

baseURL = "https://api.meraki.com/api/v1"
API_KEY = 'exampleKey'  # first api key
apiKey = 'exampleKey'  # second api key

dashboard = meraki.DashboardAPI(api_key=API_KEY,
                                base_url=baseURL,
                                print_console=False)
organizations = dashboard.organizations.getOrganizations()
networks = dashboard.organizations.getOrganizationNetworks(
    organizations[0]['id'])
# print(json.dumps(networks, indent=2))
# you can choose to set these variables manually or, if they are unknown, you can use the api calls above
# to find them and then set the index ([0]) that corresponds to the wanted network and organization
networkID = networks[0]['id']
orgID = organizations[0]['id']
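
# Instead of indexing by position, the network could also be selected by name; a minimal
# sketch, assuming a hypothetical network named "Main Office":
# networkID = next(n['id'] for n in networks if n['name'] == 'Main Office')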

# Retrieve list of devices
devices = dashboard.networks.getNetworkDevices(networkID)
numDevices = len(devices)
# print(json.dumps(devices, indent=2))

snapshot = {}
Example #18
def main():
    # getting arguments
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-K",
        "--apikey",
        help="Meraki dashboard API key or set env var",
        type=str,
        required=False,
    )
    parser.add_argument("-O",
                        "--organization",
                        help="organization ID or NAME",
                        type=str,
                        required=False)
    parser.add_argument("-N",
                        "--network",
                        help="network",
                        type=str,
                        required=False)
    parser.add_argument(
        "-P",
        "--protocol",
        help="filter protocol",
        type=str,
        choices=["cdp", "lldp"],
        default="",
        required=False,
    )
    parser.add_argument(
        "-A",
        "--all",
        help="print information for all networks in organization",
        dest="all",
        action="store_true",
        default=False,
        required=False,
    )

    args = parser.parse_args()
    org = args.organization
    protocol = args.protocol
    organization = org
    all = args.all

    # verify apikey is provided
    if args.apikey:
        apikey = args.apikey
    else:
        try:
            apikey = os.environ["apikey"]
        except:
            print(
                "\nERROR: MISSING MERAKI DASHBOARD API KEY IN ARGUMENTS AND ENV VAR\n"
            )
            sys.exit()

    # verify apikey is valid and get organizations
    try:
        m = meraki.DashboardAPI(api_key=apikey,
                                print_console=False,
                                output_log=False,
                                suppress_logging=True)
        orgs = m.organizations.getOrganizations()
    except:
        print("\nERROR GETTING ORGANIZATIONS - VERIFY API KEY IS CORRECT\n")
        sys.exit()

    # if no organization is provided print list of organizations
    if not args.organization:
        print("\nORGANIZATIONS AVAILABLE\n")
        for org in orgs:
            print(f"NAME: {org.get('name'):40} ID: {org.get('id'):20}")
        sys.exit()

    if organization:
        try:
            orgId, orgName = getIdName(args.organization,
                                       orgs)  # verify organization exists
        except:
            print(f"\nERROR: ORGANIZATION {args.organization} NOT FOUND\n")
            sys.exit()
        try:
            network = args.network
        except:
            network = False
        if orgId:
            networks = m.organizations.getOrganizationNetworks(orgId)
            if network:  # if network is provided print neighbors
                try:
                    netId, netName = getIdName(network, networks)
                except:
                    print(f"\nERROR: NETWORK {network} NOT FOUND\n")
                    sys.exit()
                if netId:  # verify network exists
                    deviceList = m.networks.getNetworkDevices(netId)
                    for device in deviceList:
                        serial, name = device.get("serial"), device.get(
                            "name", device.get("serial", "MISSING"))
                        printNei(m, serial, name, protocol)
            else:  # if no network is specified print network list
                if not all:
                    print(
                        f'\nNETWORKS AVAILABLE FOR ORGANIZATION "{orgName}" with ID {orgId}\n'
                    )
                    networks = m.organizations.getOrganizationNetworks(orgId)
                    if isinstance(networks[0], str):
                        print(f"ERROR GETTING NETWORKS: {networks[0]}\n")
                        sys.exit()
                    else:
                        for net in networks:
                            print(
                                f"NETWORK: {net.get('name'):50} ID: {net.get('id'):20}"
                            )
                else:  # if all flag is set print neigh for all networks
                    for net in networks:
                        netId = net.get("id")
                        deviceList = m.networks.getNetworkDevices(netId)
                        for device in deviceList:
                            serial, name = device.get("serial"), device.get(
                                "name", device.get("serial", "MISSING"))
                            printNei(m, serial, name, protocol)
def digest_database_data(sa, log):
    append_log(log, "dashboard_monitor::digest_database_data::Account -", sa)
    dashboard = meraki.DashboardAPI(base_url=sa.dashboard.baseurl,
                                    api_key=sa.dashboard.apikey,
                                    print_console=False,
                                    output_log=False,
                                    caller=settings.CUSTOM_UA,
                                    suppress_logging=True)

    if not sa.apply_changes:
        append_log(
            log,
            "dashboard_monitor::digest_database_data::sync session not set to apply changes;"
        )
        return

    tags = TagData.objects.filter(Q(tag__do_sync=True) & Q(update_failed=False)).\
        exclude(organization=None)
    for o in tags:
        if o.source_id and o.update_dest() == "meraki":
            if o.tag.push_delete:
                try:
                    ret = meraki_delete_sgt(dashboard, o.organization.orgid,
                                            o.source_id)
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::SGT delete",
                        ret)
                    o.delete()
                except Exception as e:  # pragma: no cover
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::SGT Delete Exception",
                        e, traceback.format_exc())
                    o.update_failed = True
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {
                        "tag": str(o),
                        "error": "Exception: " + str(e)
                    }
                    o.save()
            else:
                try:
                    ret = meraki_update_sgt(dashboard,
                                            o.organization.orgid,
                                            o.source_id,
                                            name=o.tag.name,
                                            description=o.tag.description,
                                            value=o.tag.tag_number)
                    o.last_update_data = json.dumps(ret)
                    if "groupId" in ret:
                        o.last_update_state = "True"
                        o.source_id = ret["groupId"]
                        o.source_data = json.dumps(ret)
                    else:
                        o.last_update_state = "False"
                    o.last_update = make_aware(datetime.datetime.now())
                    o.save()
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::Push SGT update",
                        o.source_id, o.tag.name, o.tag.description, ret)
                except Exception as e:  # pragma: no cover
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::SGT Update Exception",
                        e, traceback.format_exc())
                    o.update_failed = True
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {
                        "tag": str(o),
                        "error": "Exception: " + str(e)
                    }
                    o.save()
        elif o.update_dest() == "meraki":
            try:
                ret = meraki_create_sgt(dashboard,
                                        o.organization.orgid,
                                        value=o.tag.tag_number,
                                        name=o.tag.name,
                                        description=o.tag.description)
                o.last_update_data = json.dumps(ret)
                if "groupId" in ret:
                    o.last_update_state = "True"
                    o.source_id = ret["groupId"]
                    o.source_data = json.dumps(ret)
                else:
                    o.last_update_state = "False"
                o.last_update = make_aware(datetime.datetime.now())
                o.save()
                append_log(
                    log,
                    "dashboard_monitor::digest_database_data::Push SGT create",
                    o.tag.tag_number, o.tag.name, o.tag.description, ret)
            except Exception as e:  # pragma: no cover
                append_log(
                    log,
                    "dashboard_monitor::digest_database_data::SGT Create Exception",
                    e, traceback.format_exc())
                o.update_failed = True
                o.last_update = make_aware(datetime.datetime.now())
                o.last_update_state = "False"
                o.last_update_data = {
                    "tag": str(o),
                    "error": "Exception: " + str(e)
                }
                o.save()

    acls = ACLData.objects.filter(Q(acl__do_sync=True) & Q(update_failed=False)).\
        exclude(organization=None)
    for o in acls:
        if o.source_id and o.update_dest() == "meraki":
            if o.acl.push_delete:
                try:
                    ret = meraki_delete_sgacl(dashboard, o.organization.orgid,
                                              o.source_id)
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::SGACL delete",
                        ret)
                    o.delete()
                except Exception as e:  # pragma: no cover
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::SGACL Delete Exception",
                        e, traceback.format_exc())
                    o.update_failed = True
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {
                        "acl": str(o),
                        "error": "Exception: " + str(e)
                    }
                    o.save()
            else:
                try:
                    ret = meraki_update_sgacl(dashboard,
                                              o.organization.orgid,
                                              o.source_id,
                                              name=o.acl.name,
                                              description=o.acl.description,
                                              rules=o.lookup_rules(o),
                                              ipVersion=o.lookup_version(o))
                    o.last_update_data = json.dumps(ret)
                    if "aclId" in ret:
                        o.last_update_state = "True"
                        o.source_id = ret["aclId"]
                        o.source_data = json.dumps(ret)
                    else:
                        o.last_update_state = "False"
                    o.last_update = make_aware(datetime.datetime.now())
                    o.save()
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::Push SGACL update",
                        o.source_id, o.acl.name, o.acl.description, ret)
                except Exception as e:  # pragma: no cover
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::SGACL Update Exception",
                        e, traceback.format_exc())
                    o.update_failed = True
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {
                        "acl": str(o),
                        "error": "Exception: " + str(e)
                    }
                    o.save()
        elif o.update_dest() == "meraki":
            try:
                ret = meraki_create_sgacl(dashboard,
                                          o.organization.orgid,
                                          name=o.acl.name,
                                          description=o.acl.description,
                                          rules=list(o.lookup_rules(o)),
                                          ipVersion=o.lookup_version(o))
                o.last_update_data = json.dumps(ret)
                if "aclId" in ret:
                    o.last_update_state = "True"
                    o.source_id = ret["aclId"]
                    o.source_data = json.dumps(ret)
                else:
                    o.last_update_state = "False"
                o.last_update = make_aware(datetime.datetime.now())
                o.save()
                append_log(
                    log,
                    "dashboard_monitor::digest_database_data::Push SGACL create",
                    o.acl.name, o.acl.description, ret)
            except Exception as e:  # pragma: no cover
                append_log(
                    log,
                    "dashboard_monitor::digest_database_data::SGACL Create Exception",
                    e, traceback.format_exc())
                o.update_failed = True
                o.last_update = make_aware(datetime.datetime.now())
                o.last_update_state = "False"
                o.last_update_data = {
                    "acl": str(o),
                    "error": "Exception: " + str(e)
                }
                o.save()

    policies = PolicyData.objects.filter(Q(policy__do_sync=True) & Q(update_failed=False)).\
        exclude(organization=None)
    for o in policies:
        if o.policy.push_delete and o.update_dest() == "meraki":
            try:
                srcsgt, dstsgt = o.policy.lookup_sgts(o)
                orgs = sa.dashboard.organization.all()
                for org in orgs:
                    ret = meraki_update_sgpolicy(dashboard,
                                                 org.orgid,
                                                 srcGroupId=srcsgt.source_id,
                                                 dstGroupId=dstsgt.source_id,
                                                 aclIds=None,
                                                 catchAllRule="global")
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::Policy delete",
                        ret)
                    o.delete()
            except Exception as e:  # pragma: no cover
                append_log(
                    log,
                    "dashboard_monitor::digest_database_data::Policy Delete Exception",
                    e, traceback.format_exc())
                o.update_failed = True
                o.last_update = make_aware(datetime.datetime.now())
                o.last_update_state = "False"
                o.last_update_data = {
                    "policy": str(o),
                    "error": "Exception: " + str(e)
                }
                o.save()
        elif o.update_dest() == "meraki":
            try:
                srcsgt, dstsgt = o.lookup_sgt_data(o)
                sgacl = o.lookup_sgacl_data(o)
                acls = []
                if sgacl:
                    for s in sgacl:
                        acls.append(s.source_id)

                if not srcsgt or not dstsgt or sgacl is None:
                    o.update_failed = False  # was True; disabled for now
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {
                        "policy":
                        str(o),
                        "error":
                        "Meraki: Unable to locate sgt/acl data;" +
                        str(srcsgt) + ";" + str(dstsgt) + ";" + str(sgacl)
                    }
                    o.save()
                    continue

                ret = meraki_update_sgpolicy(
                    dashboard,
                    o.organization.orgid,
                    name=o.policy.name,
                    description=o.policy.description,
                    srcGroupId=srcsgt.source_id,
                    dstGroupId=dstsgt.source_id,
                    aclIds=acls,
                    catchAllRule=o.lookup_acl_catchall(o),
                    bindingEnabled=True,
                    monitorModeEnabled=False)
                o.last_update_data = json.dumps(ret)
                if "srcGroupId" in ret:
                    o.last_update_state = "True"
                    o.source_id = "s" + str(ret["srcGroupId"]) + "-d" + str(
                        ret["dstGroupId"])
                    o.source_data = json.dumps(ret)
                else:
                    o.last_update_state = "False"
                o.last_update = make_aware(datetime.datetime.now())
                o.save()
                append_log(
                    log,
                    "dashboard_monitor::digest_database_data::Push Policy update",
                    o.source_id, o.policy.name, o.policy.description, ret)
            except Exception as e:  # pragma: no cover
                append_log(
                    log,
                    "dashboard_monitor::digest_database_data::Policy Update Exception",
                    e, traceback.format_exc())
                o.update_failed = True
                o.last_update = make_aware(datetime.datetime.now())
                o.last_update_state = "False"
                o.last_update_data = {
                    "policy": str(o),
                    "error": "Exception: " + str(e)
                }
                o.save()
import meraki
from private_vars import api_key, org_id
import json

dashboard = meraki.DashboardAPI(api_key,
                                suppress_logging=True,
                                print_console=False)

all_networks = dashboard.organizations.getOrganizationNetworks(org_id)
#print(type(all_networks[0]))

network_id = all_networks[0]["id"]
#print(network_id)

network_devices = dashboard.networks.getNetworkDevices(network_id)
network_updates = dashboard.networks.getNetworkFirmwareUpgrades(network_id)

print(network_devices)
print(network_updates)
#
# --------------------------------------------------------
# Main
# --------------------------------------------------------
#
if __name__ == "__main__":
    os.system('clear')  # clear screen
    print(info)
    print(100 * '-', '\n\n')
    input('press enter')

    # get API key
    API_KEY = get_API_KEY()

    # Instantiate a Meraki dashboard API session
    dashboard_call = meraki.DashboardAPI(
        api_key=API_KEY,
        base_url='https://api-mp.meraki.com/api/v1/',
        log_file_prefix=os.path.basename(__file__)[:-3],
        log_path='./logs/',
        print_console=False)

    # run API call
    # ---------------
    #
    organizationId = get_org_id()
    rename_org(organizationId)
    list_orgs()

#
def main():
    # Instantiate a Meraki dashboard API session
    dashboard = meraki.DashboardAPI(
        api_key='',
        base_url='https://api-mp.meraki.com/api/v0/',
        output_log=True,
        log_file_prefix=os.path.basename(__file__)[:-3],
        log_path='',
        print_console=False)

    # Get list of organizations to which API key has access
    organizations = dashboard.organizations.getOrganizations()

    # Iterate through list of orgs
    for org in organizations:
        print(f'\nAnalyzing organization {org["name"]}:')
        org_id = org['id']

        # Get list of networks in organization
        try:
            networks = dashboard.networks.getOrganizationNetworks(org_id)
        except meraki.APIError as e:
            print(f'Meraki API error: {e}')
            print(f'status code = {e.status}')
            print(f'reason = {e.reason}')
            print(f'error = {e.message}')
            continue
        except Exception as e:
            print(f'some other error: {e}')
            continue

        # Create local folder
        todays_date = f'{datetime.now():%Y-%m-%d}'
        folder_name = f'Org {org_id} clients {todays_date}'
        if folder_name not in os.listdir():
            os.mkdir(folder_name)

        # Iterate through networks
        total = len(networks)
        counter = 1
        print(
            f'  - iterating through {total} networks in organization {org_id}')
        for net in networks:
            print(
                f'Finding clients in network {net["name"]} ({counter} of {total})'
            )
            try:
                # Get list of clients on network, filtering on timespan of last 14 days
                clients = dashboard.clients.getNetworkClients(
                    net['id'],
                    timespan=60 * 60 * 24 * 14,
                    perPage=1000,
                    total_pages='all')
            except meraki.APIError as e:
                print(f'Meraki API error: {e}')
                print(f'status code = {e.status}')
                print(f'reason = {e.reason}')
                print(f'error = {e.message}')
            except Exception as e:
                print(f'some other error: {e}')
            else:
                if clients:
                    # Write to file
                    file_name = f'{net["name"]}.csv'
                    output_file = open(f'{folder_name}/{file_name}',
                                       mode='w',
                                       newline='\n')
                    field_names = clients[0].keys()
                    csv_writer = csv.DictWriter(output_file,
                                                field_names,
                                                delimiter=',',
                                                quotechar='"',
                                                quoting=csv.QUOTE_ALL)
                    csv_writer.writeheader()
                    csv_writer.writerows(clients)
                    output_file.close()
                    print(f'  - found {len(clients)}')

            counter += 1

        # Stitch together one consolidated CSV per org
        output_file = open(f'{folder_name}.csv', mode='w', newline='\n')
        field_names = [
            'id', 'mac', 'description', 'ip', 'ip6', 'ip6Local', 'user',
            'firstSeen', 'lastSeen', 'manufacturer', 'os',
            'recentDeviceSerial', 'recentDeviceName', 'recentDeviceMac',
            'ssid', 'vlan', 'switchport', 'usage', 'status', 'notes',
            'smInstalled', 'groupPolicy8021x'
        ]
        field_names.insert(0, "Network Name")
        field_names.insert(1, "Network ID")

        csv_writer = csv.DictWriter(output_file,
                                    field_names,
                                    delimiter=',',
                                    quotechar='"',
                                    quoting=csv.QUOTE_ALL)
        csv_writer.writeheader()
        for net in networks:
            file_name = f'{net["name"]}.csv'
            if file_name in os.listdir(folder_name):
                with open(f'{folder_name}/{file_name}') as input_file:
                    csv_reader = csv.DictReader(input_file,
                                                delimiter=',',
                                                quotechar='"',
                                                quoting=csv.QUOTE_ALL)
                    # csv.DictReader consumes the header row itself, so no manual skip is needed
                    for row in csv_reader:
                        row['Network Name'] = net['name']
                        row['Network ID'] = net['id']
                        csv_writer.writerow(row)
Example #23
def main(org_id, timespan):
    # Instantiate a Meraki dashboard API session
    dashboard = meraki.DashboardAPI(
        base_url='https://api-mp.meraki.com/api/v0/',
        print_console=False,
        output_log=False,
    )

    # Get list of API usage data and start the output csv string
    apiUsage = dashboard.api_usage.getOrganizationApiRequests(
        org_id, timespan=timespan, total_pages=-1)
    csvString = 'method,host,path,queryString,tsDate,tsTime,responseCode,sourceIp,userAgent,'
    csvString += 'implementation,implementationVersion,distro,distroVersion,system,systemRelease,'
    csvString += 'cpu,be_geo_id,caller\r\n'
    cumulativeAPIcalls = 0
    for use in apiUsage:
        csvString += use['method'] + ','
        csvString += use['host'] + ','
        csvString += use['path'] + ','
        csvString += use['queryString'] + ','
        csvString += use['ts'].split('T')[0] + ','
        csvString += use['ts'].split('T')[1].replace('Z', '') + ','
        csvString += str(use['responseCode']) + ','
        csvString += use['sourceIp'] + ','

        # Special  User-Agent processing
        if 'python-meraki' in use['userAgent']:
            print(use['userAgent'])
            userAgent = use['userAgent'].split(' ')
            csvString += userAgent[0] + ','
            if len(userAgent) > 1:
                if "implementation" in userAgent[1]:
                    userAgentDict = json.loads(
                        urllib.parse.unquote(userAgent[1]))
                    csvString += userAgentDict['implementation']['name'] + ','
                    csvString += userAgentDict['implementation'][
                        'version'] + ','
                    csvString += userAgentDict['distro']['name'] + ','
                    csvString += userAgentDict['distro']['version'] + ','
                    csvString += userAgentDict['system']['name'] + ','
                    csvString += userAgentDict['system']['release'] + ','
                    csvString += userAgentDict['cpu'] + ','
                    if "be_geo_id" in userAgentDict:
                        csvString += userAgentDict['be_geo_id'] + ','
                    else:
                        csvString += ','
                    if "application" in userAgentDict:
                        csvString += userAgentDict['application'] + ','
                    elif "caller" in userAgentDict:
                        csvString += userAgentDict['caller'] + ','
                    else:
                        csvString += ','
                else:
                    csvString += ',,,,,,,,,'
            else:
                csvString += ',,,,,,,,,'
        else:
            csvString += use['userAgent'] + ','
            csvString += ',,,,,,,,,'

        csvString += '\r\n'

    # Output the file
    now = datetime.now()
    dt_string = now.strftime("%Y-%m-%d_%H-%M-%S")
    filename = org_id + '_' + str(timespan) + '_' + dt_string + '.csv'
    file = open(filename, 'w')
    file.write(csvString)
    file.close()
    print('Results written to ' + filename)
import meraki

apikey = '<Enter your API key here>'
baseurl = 'https://api.Meraki.com/api/v0'

# Declare a Meraki dashboard API object in the dashboard variable
dashboard = meraki.DashboardAPI(api_key=apikey, base_url=baseurl)

# Get all the organizations this API key has access to
response = dashboard.organizations.getOrganizations()

# iterate over the response and print each row

for row in response:
    print(row)
    print("ID de Organización: " + str(row['id']))
    print("Nombre de Organización: " + row['name'])
Example #25
import meraki  # Import Meraki SDK
from datetime import datetime  # Import Date & Time Library
import firebase_admin  # Import Firebase Python SDK
from firebase_admin import credentials
from firebase_admin import db
from credentials import ise_api_key, ise_network_id  # Separate file with credentials

# Fetch the service account key JSON file contents
cred = credentials.Certificate('serviceAccountKey.json')
# Initialize the app with a service account, granting admin privileges
firebase_admin.initialize_app(
    cred, {'databaseURL': 'https://dashboard-cisco.firebaseio.com/'})

# Initialize Meraki Dashboard with api_key authentication
dashboard = meraki.DashboardAPI(api_key=ise_api_key,
                                base_url='https://n143.meraki.com/api/v0')


# Get Network Devices Models
def get_devices_model():
    devices = dashboard.devices.getNetworkDevices(ise_network_id)
    device_model = []
    for device in devices:
        device_model.append(device['model'][:2])
    return device_model


# Get Network Clients
def get_network_clients():
    network_clients = dashboard.clients.getNetworkClients(ise_network_id)
    clients_status = []
Example #26
key = None

try:
    serial = sys.argv[1]
except:
    print("No serial provided")
    sys.exit(0)

try:
    key = sys.argv[2]
except:
    print("No API key provided")
    sys.exit(0)

dash = meraki.DashboardAPI(key,
                           output_log=False,
                           print_console=False,
                           suppress_logging=False)


def on_connect(client, userData, flags, rc):
    """
    * Callback function for client connection
    * Means a CONNACK was received
    """
    client.subscribe("/merakimv/" + serial + "/raw_detections")


previous_time = None


def on_message(client, userData, msg):
Example #27
from datetime import datetime as dt
from time import sleep
from re import search
import meraki
import sys

if __name__ == "__main__":

    start_time = dt.now()

    try:
        # Unique Meraki API key from the Meraki Dashboard
        api_key = "7e1b8674f0cd64850602befdc8b4941bab1e28a7"

        # Initiating the API session and creating API object to use in API queries
        m = meraki.DashboardAPI(api_key)

        output_file = "C:\\Users\\lamin\\OneDrive\\Documents\\Repos\\Meraki\\devices_report.csv"

        with open(output_file, "w") as ofile:

            orgs = m.organizations.getOrganizations()
            stats = {}

            for org in orgs:
                count = 0
                devices = m.organizations.getOrganizationInventoryDevices(
                    org['id'],
                    total_pages="all",
                    startingAfter="2020-03-01T00:00:00.000000Z",
                    usedState="used")
Example #28
def main():
    import time
    # client_query() # this queries current org and all client information and builds database
    # exit()

    # Fire up Meraki API and build DB's

    log_dir = os.path.join(os.getcwd(), "Logs/")
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)
    db = meraki.DashboardAPI(
        api_key=g.get_api_key(),
        base_url='https://api.meraki.com/api/v1/',
        print_console=False,
        output_log=True,
        log_file_prefix=os.path.basename(__file__)[:-3],
        log_path='Logs/',
    )
    cfg = loadCFG(db)

    th_array = []
    tag_target = cfg['tag_target']
    tag_master = cfg['tag_master']
    #    adminEmails = cfg['adminEmails']
    orgs_whitelist = cfg['whitelist']
    WRITE = cfg['WRITE']
    SWITCH = cfg['SWITCH']

    th = tagHelper2.tagHelper(db, tag_target, tag_master, orgs_whitelist)
    orgs = th.orgs  # get a list of orgs

    #Master ChangeLog Helper
    clh = changelogHelper.changelogHelper(db, orgs)
    clh.ignoreAPI = False  #make sure it'll trigger on API changes too, default is TRUE

    clh_clones = changelogHelper.changelogHelper(db, orgs)
    clh_clones.tag_target = tag_target  #this sets the TAG so it'll detect additions of new networks during runtime

    loop = True  #Set this to false to break loop

    mNets = {}  #Dictionary of {'net_id': <mnet_obj>}
    master_netid = None
    mr_obj = []  #collect the networks
    last_changes = []
    longest_loop = 0
    loop_count = 0
    while loop:
        print()
        print(
            f'\t{bcolors.HEADER}****************************{bcolors.FAIL}START LOOP{bcolors.HEADER}*****************************'
        )
        print(bcolors.ENDC)
        startTime = time.time()

        if WRITE:
            print(
                f'{bcolors.OKGREEN}WRITE MODE[{bcolors.WARNING}ENABLED{bcolors.OKGREEN}]{bcolors.ENDC}'
            )
        else:
            print(
                f'{bcolors.OKGREEN}WRITE MODE[{bcolors.WARNING}DISABLED{bcolors.OKGREEN}]{bcolors.ENDC}'
            )

        # TagHelper sync networks
        th.sync()  #taghelper, look for any new networks inscope

        print()
        #Master Loader section
        netCount = 0
        for thn in th.nets:
            netCount += 1
            if loop_count == 0:
                print(
                    f'{bc.WARNING}Network #{netCount} of [{len(th.nets)}] networks {bc.ENDC}'
                )

            if not tag_master in th.nets[thn]['tags']:
                clh_clones.addNetwork(thn)  #this goes into the CLONES bucket
                if not thn in mNets:
                    mNets[thn] = mNET(db, thn, WRITE).loadCache()
            else:
                if not thn in mNets:
                    mNets[thn] = mNET(db, thn, WRITE).loadCache()
                if master_netid != thn:
                    master_netid = thn
                    clh.clearNetworks()  #wipes out previous master
                    print(f'MASTER NETWORK change to netid[{thn}]')
                    clh.addNetwork(thn)

        print()
        print(f'Master WL[{clh.watch_list}]')
        print(f'Clones WL[{clh_clones.watch_list}]')

        th.show()  #show inscope networks/orgs

        #Cleanup for old mNET objects which have been removed from scope
        delList = []
        for mid in mNets:  #cleanup
            if not mid in th.nets:
                delList.append(mid)

        for mid in delList:
            mNets.pop(mid)
            print(f'Dropping network[{mid}] from mNets DB')
            clh_clones.delNetwork(mid)
            if master_netid == mid:  #assuming the master is changed/removed
                clh.delNetwork(mid)  #remove it from changeloghelper
                master_netid = None

        if master_netid is None:
            print(f'Something went wrong, no master netid!!!')
            continue

        print()
        master_change = clh.hasChange()
        clone_change = clh_clones.hasChange()
        print(
            f'{bcolors.OKGREEN}Changes Master[{bcolors.WARNING}{master_change}{bcolors.OKGREEN}] Clone[{bcolors.WARNING}{clone_change}{bcolors.OKGREEN}]'
        )
        print()

        print(
            f'{bcolors.OKGREEN}Loop Count[{bcolors.WARNING}{loop_count}{bcolors.OKGREEN}]'
        )

        print()

        if clone_change:  #if there's a change to clones, run a short loop syncing just those networks
            print(
                f'{bcolors.FAIL}Change in a target Network Detected:{bcolors.Blink} Initializing Sync{bcolors.ENDC}'
            )
            inscope_clones = clh_clones.changed_nets  #gets list of networks changed
            for ic in inscope_clones:
                print(f'New Network detected!!!')
                if not ic in mNets:
                    mNets[ic] = mNET(db, ic, WRITE).loadCache()
                    mNets[ic].cloneFrom(mNets[master_netid])
                else:
                    mNets[ic].sync()
                    mNets[ic].cloneFrom(mNets[master_netid])

        elif master_change:
            print(
                f'{bcolors.FAIL}Master change Detected:{bcolors.Blink} Syncing Networks{bcolors.ENDC}'
            )
            mcCount = 0
            mNets[master_netid].sync()
            avgTime = 0
            for net in mNets:
                if net == master_netid: continue
                mcCount += 1
                secondsGuess = avgTime * (len(th.nets) - 1 - mcCount)
                print(
                    f'{bc.WARNING}Network #{mcCount} of [{len(th.nets)-1}] networks. AvgTime[{round(avgTime,1)}] seconds. Estimated [{round(secondsGuess/60,1)}] minutes left{bc.ENDC}'
                )
                startT = time.time()

                #Nifty little workaround for finding "out of compliance" networks: if there's an exception or error, re-sync and try again
                tries = 1
                while tries > 0:
                    try:
                        mNets[net].cloneFrom(mNets[master_netid])
                        tries = 0
                    except:
                        #potentially something changed...
                        print(
                            f'\t{bc.FAIL}ERROR:{bc.OKBLUE} Something changed in network [{bc.WARNING}{mNets[net].name}{bc.OKBLUE}]. Re-Syncing network and trying again....{bc.ENDC}'
                        )
                        mNets[net].sync()
                    tries -= 1

                endT = time.time()
                dur = round(endT - startT, 2)
                if avgTime == 0:
                    avgTime = dur
                else:
                    avgTime = (avgTime + dur) / 2

        else:
            print(
                f'{bc.OKBLUE}No changes detected in target networks{bc.ENDC}')

        print()

        loop_count += 1

        print()
        endTime = time.time()
        duration = round(endTime - startTime, 2)
        if duration > longest_loop: longest_loop = duration
        #print()
        if duration < 60:
            print(
                f'\t{bcolors.OKBLUE}Loop completed in {bcolors.WARNING}{duration}{bcolors.OKBLUE} seconds'
            )
        else:
            duration = round(duration / 60, 2)
            print(
                f'\t{bcolors.OKBLUE}Loop completed in {bcolors.WARNING}{duration}{bcolors.OKBLUE} minutes'
            )

        total_networks = len(mNets)
        print(
            f'\t{bcolors.OKBLUE}Total Networks in scope{bcolors.BLINK_FAIL} {total_networks}{bcolors.ENDC}'
        )
        mins = round(longest_loop / 60, 2)
        print(
            f'\t{bcolors.OKBLUE}Longest Loop [{bcolors.WARNING} {mins} {bcolors.OKBLUE}] minutes{bcolors.ENDC}'
        )

        print()
        print(
            f'\t{bcolors.HEADER}****************************{bcolors.FAIL}END LOOP{bcolors.HEADER}*****************************'
        )
        print(bcolors.ENDC)
        print()

        time.sleep(5)
        #while count_sleep > 0:
        #    time.sleep(1)
        #       #     print(f'{bcolors.OKGREEN}z')
        #    count_sleep -= 1
        #print(bcolors.ENDC)
        print()
import meraki

# Defining your API key as a variable in source code is not recommended
API_KEY = '6bec40cf957de430a6f1f2baa056b99a4fac9ea0'
# Instead, use an environment variable as shown under the Usage section
# @ https://github.com/meraki/dashboard-api-python/

dashboard = meraki.DashboardAPI(API_KEY)

network_id = 'L_646829496481105433'
url = 'https://www.example.com/path'

response = dashboard.networks.createNetworkWebhooksWebhookTest(
    network_id,
    url,
    sharedSecret='shhh',
    payloadTemplateId='wpt_00001',
    payloadTemplateName='Payload Template',
    alertTypeId='power_supply_down')

print(response)
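
# A minimal sketch of the environment-variable approach mentioned above: when no api_key
# argument is passed, the SDK falls back to the MERAKI_DASHBOARD_API_KEY environment
# variable, so the key never needs to appear in source code.
#
#   dashboard = meraki.DashboardAPI()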
#!/usr/bin/env python

import os
import sys
import json
import meraki

#Instantiate the client (API consumer class)
DASHBOARD = meraki.DashboardAPI(api_key=os.environ['MERAKI_API_KEY'],
                                base_url='https://api.meraki.com/api/v1',
                                print_console=False)

NETWORK = os.environ['NETWORK']


#Update the attributes of an SSID or create a new SSID - definition of values in config_ssid.py file
def update_ssid():

    print("Reading the configuration values from json file ...")

    # Reading config variables from a JSON file; this info could come from an external database as well
    with open('config_ssid.json') as json_file:
        config_ssid = json.load(json_file)

    print("Updating Wireless SSID ...")

    try:
        resp = DASHBOARD.wireless.updateNetworkWirelessSsid(
            networkId=NETWORK,
            number=config_ssid['number'],
            name=config_ssid['name'],