# Example no. 1
    def __init__(self, log=None):
        """Build the pxGrid client configuration.

        Starts from hard-coded JSON defaults, then overlays connection details
        from the first pxGrid-enabled ISEServer record, when one exists.

        :param log: optional log accumulator handed to append_log.
        """
        self.__ssl_context = None
        # Default client settings, parsed with a Namespace object_hook so the
        # fields are attribute-accessible (self.config.hostname, .port, ...).
        defaults = (
            '{"hostname": [], "nodename": "", "clientcert": "", "clientkey": "", "clientkeypassword": "",'
            '"servercert": "", "password": "", "description": "", "id": null, "verbose": true, "services": null,'
            '"port": 8910, "service_details": null, "service": "com.cisco.ise.config.trustsec",'
            '"topic": "securityGroupTopic,securityGroupAclTopic", "session_dedup": false,'
            '"subscribe_all": false}'
        )
        self.config = json.loads(defaults, object_hook=lambda d: Namespace(**d))

        servers = ISEServer.objects.filter(pxgrid_enable=True)
        if not servers:
            self.config.id = None
            append_log(
                log,
                "pxgrid_monitor::pxgrid_config::No pxGrid servers configured..."
            )
            return

        append_log(
            log,
            "pxgrid_monitor::pxgrid_config::Establishing websocket for pxGrid..."
        )
        server = servers[0]
        self.config.hostname = [server.pxgrid_ip]
        self.config.nodename = server.pxgrid_cliname
        self.config.clientcert = str(server.pxgrid_clicert.file)
        self.config.clientkey = str(server.pxgrid_clikey.file)
        self.config.clientkeypassword = server.pxgrid_clipw
        self.config.servercert = str(server.pxgrid_isecert.file)
        self.config.id = server.id
def read_json_file(in_filename, log, max_retries=None):
    """Read a file and decode it as JSON, retrying on any failure.

    Each failed attempt sleeps one second and logs the failure count. By
    default this retries forever (the original behavior); pass ``max_retries``
    to bound the number of attempts so a persistent failure cannot hang the
    caller indefinitely.

    :param in_filename: filename passed through to read_file_all.
    :param log: log accumulator for append_log.
    :param max_retries: optional int; when set, the last read/parse exception
        is re-raised after this many consecutive failures.
    :return: the decoded JSON value.
    :raises Exception: the last error, only when max_retries is exceeded.
    """
    fail_count = 0
    while True:
        try:
            r = read_file_all(in_filename)
            return json.loads(r)
        except Exception as e:
            fail_count += 1
            # Original ordering preserved: wait first, then log the failure.
            time.sleep(1)
            append_log(log, "dashboard_monitor::read_json_file::", fail_count,
                       e)
            if max_retries is not None and fail_count >= max_retries:
                raise
def process_webhook(request):
    """Webhook receiver.

    A POST with a JSON body marks the first webhook-enabled Dashboard record
    for a forced rebuild; any other method only returns the usage hint.
    """
    log = []
    if request.method == 'POST':
        payload = json.loads(request.body)
        append_log(log, "webhook post", payload)

        enabled = Dashboard.objects.filter(webhook_enable=True)
        if enabled:
            dash = enabled[0]
            dash.force_rebuild = True
            dash.save()
            append_log(log, "setting dashboard to force rebuild")

        db_log("dashboard_webhook", log)

    return HttpResponse("Send webhooks here as POST requests.")
def job():
    """Scheduler entry point: try to start the webhook monitor.

    When run() succeeds, this job de-schedules itself; otherwise it logs that
    configuration is still missing and stays scheduled for the next check.
    """
    log = []
    try:
        started = run() is not False
        if started:
            # Monitor launched; stop polling for configuration.
            scheduler.remove_job("dashboard_webhook")
            append_log(log, "Webhook Monitor started")
        else:
            append_log(
                log,
                "Dashboard webhook configuration not present. Will check again..."
            )
        db_log("dashboard_webhook", log)
    except Exception as e:
        append_log(
            log,
            "#### Dashboard webhooks are not configured: dashboard_webhook", e)
        db_log("dashboard_webhook", log)
def parse_url(request):
    """Dashboard-simulator dispatcher: map a Meraki-style REST request onto
    local JSON files.

    The path after /meraki/api/v1/organizations/ is split on "/": arr[0] is
    the organization id; when more segments are present, arr[2] names the
    endpoint ("groups", "acls", "bindings") and arr[3] an optional element id
    (arr[1] is presumably a fixed path segment such as "adaptivePolicy" —
    TODO confirm against the caller's URL scheme).  The read/write work is
    delegated to handle_request; this function loads the backing JSON file,
    describes each endpoint's server-generated fields, writable fields and
    uniqueness rules, and persists any updated dataset afterwards.

    :param request: Django HttpRequest for the simulated API call.
    :return: whatever handle_request produced, or None when an exception was
        caught and logged.
    """
    log = []
    baseurl = "/".join(request.build_absolute_uri().split("/")[:3])
    p = request.path.replace("/meraki/api/v1/organizations/",
                             "").replace("/meraki/api/v1/organizations", "")
    arr = p.split("/")

    isotime = datetime.datetime.now().isoformat()
    org_id = arr[0]

    # Server-generated field values per endpoint; "{{...}}" placeholders are
    # presumably expanded by handle_request — TODO confirm.
    fixedvals = {
        "organizations": {
            "id": "{{id-num:18}}",
            "url": "{{url}}/o/{{id-mix:7}}/manage/organization/overview"
        },
        "groups": {
            "groupId": "{{length}}",
            "versionNum": 1,
            "createdAt": isotime,
            "updatedAt": isotime
        },
        "acls": {
            "aclId": "{{length}}",
            "versionNum": 1,
            "createdAt": isotime,
            "updatedAt": isotime
        },
        "bindings": {
            "versionNum": 1,
            "updatedAt": isotime
        }
    }
    # Client-writable fields per endpoint (None = taken from the POST body).
    postvals = {
        "organizations": {
            "name": None
        },
        "groups": {
            "name": None,
            "description": None,
            "value": None,
            "networkObjectId": None
        },
        "acls": {
            "name": None,
            "description": None,
            "ipVersion": None,
            "rules": None
        },
        "bindings": {
            "srcGroupId": None,
            "dstGroupId": None,
            "name": None,
            "description": None,
            "aclIds": None,
            "catchAllRule": None,
            "bindingEnabled": None,
            "monitorModeEnabled": None
        }
    }
    # Per-endpoint id field names and uniqueness constraints for handle_request.
    info = {
        "organizations": {
            "id": "id",
            "unique": [{
                "id": []
            }]
        },
        "groups": {
            "id": "groupId",
            "unique": [{
                "value": [],
                "groupId": []
            }]
        },
        "acls": {
            "id": "aclId",
            "unique": [{
                "name": [],
                "aclId": []
            }]
        },
        "bindings": {
            "none_as_delete_key": "aclIds",
            "put_unique": ["srcGroupId", "dstGroupId"],
            "unique_results": []
        }
    }

    append_log(log, "dashboard_simulator::", request.path)
    ret = None
    try:
        if len(arr) == 1:
            # Bare /organizations[/{id}] request: organizations live in
            # orgs.json as a flat list, not keyed by org id.
            file_type = "orgs.json"
            full_dataset = []
            dataset = read_json_file(file_type, log)
            if arr[0] == "":
                elem_id = None
            else:
                elem_id = arr[0]
            endpoint = "organizations"
        else:
            # Endpoint-level request: the backing file is a dict keyed by
            # org id; pop this org's slice for handle_request to operate on.
            file_type = arr[2] + ".json"
            full_dataset = read_json_file(file_type, log)
            dataset = full_dataset.pop(org_id, [])
            if len(arr) == 3 or request.method == "POST":
                elem_id = None
            else:
                elem_id = arr[3]
            endpoint = arr[2]
            if endpoint == "bindings" and (request.method == "POST"
                                           or request.method == "DELETE"):
                append_log(log,
                           "dashboard_monitor::bindings::Unsupported Method")
                db_log("dashboard_simulator", log)
                return HttpResponseBadRequest("Unsupported Method")

        if request.body:
            jd = json.loads(request.body)
        else:
            jd = None

        updated_data, ret = handle_request(request.method, jd, baseurl,
                                           endpoint, elem_id, dataset,
                                           fixedvals, postvals, info)
        # Persist only when handle_request reported a change; the file shape
        # (list vs dict-by-org) decides how the update is merged back.
        if updated_data:
            if isinstance(full_dataset, list):
                write_file(file_type,
                           json.dumps(full_dataset + [updated_data], indent=4))
            else:
                full_dataset[org_id] = updated_data
                write_file(file_type, json.dumps(full_dataset, indent=4))
    except Exception as e:
        append_log(log, "dashboard_simulator::Exception.", e)

    db_log("dashboard_simulator", log)
    return ret
def digest_database_data(sa, log):
    """Push pending tag / ACL / policy changes for one sync account to Meraki.

    For each TagData / ACLData / PolicyData row marked for sync, not
    previously failed, and whose update destination is "meraki", this
    deletes, updates or creates the corresponding Dashboard object and
    records the outcome on the row (last_update_state is stored as the
    strings "True"/"False", not booleans).  Exceptions are caught per object
    so one failure does not stop the batch.

    NOTE(review): a second, ISE-facing digest_database_data is defined later
    in this file and would shadow this one at import time — confirm the two
    belong in separate modules.

    :param sa: sync-session object exposing .dashboard and .apply_changes.
    :param log: log accumulator for append_log.
    """
    append_log(log, "dashboard_monitor::digest_database_data::Account -", sa)
    dashboard = meraki.DashboardAPI(base_url=sa.dashboard.baseurl,
                                    api_key=sa.dashboard.apikey,
                                    print_console=False,
                                    output_log=False,
                                    caller=settings.CUSTOM_UA,
                                    suppress_logging=True)

    # Bail out early when the session is read-only.
    if not sa.apply_changes:
        append_log(
            log,
            "dashboard_monitor::digest_database_data::sync session not set to apply changes;"
        )
        return

    # --- SGTs (tags): delete when flagged, update when a source_id exists,
    # otherwise create. ---
    tags = TagData.objects.filter(Q(tag__do_sync=True) & Q(update_failed=False)).\
        exclude(organization=None)
    for o in tags:
        if o.source_id and o.update_dest() == "meraki":
            if o.tag.push_delete:
                try:
                    ret = meraki_delete_sgt(dashboard, o.organization.orgid,
                                            o.source_id)
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::SGT delete",
                        ret)
                    o.delete()
                except Exception as e:  # pragma: no cover
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::SGT Delete Exception",
                        e, traceback.format_exc())
                    # Mark failed so the row is excluded from future passes.
                    o.update_failed = True
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {
                        "tag": str(o),
                        "error": "Exception: " + str(e)
                    }
                    o.save()
            else:
                try:
                    ret = meraki_update_sgt(dashboard,
                                            o.organization.orgid,
                                            o.source_id,
                                            name=o.tag.name,
                                            description=o.tag.description,
                                            value=o.tag.tag_number)
                    o.last_update_data = json.dumps(ret)
                    # Presence of "groupId" in the response signals success.
                    if "groupId" in ret:
                        o.last_update_state = "True"
                        o.source_id = ret["groupId"]
                        o.source_data = json.dumps(ret)
                    else:
                        o.last_update_state = "False"
                    o.last_update = make_aware(datetime.datetime.now())
                    o.save()
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::Push SGT update",
                        o.source_id, o.tag.name, o.tag.description, ret)
                except Exception as e:  # pragma: no cover
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::SGT Update Exception",
                        e, traceback.format_exc())
                    o.update_failed = True
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {
                        "tag": str(o),
                        "error": "Exception: " + str(e)
                    }
                    o.save()
        elif o.update_dest() == "meraki":
            # No source_id yet: the tag does not exist on Dashboard — create it.
            try:
                ret = meraki_create_sgt(dashboard,
                                        o.organization.orgid,
                                        value=o.tag.tag_number,
                                        name=o.tag.name,
                                        description=o.tag.description)
                o.last_update_data = json.dumps(ret)
                if "groupId" in ret:
                    o.last_update_state = "True"
                    o.source_id = ret["groupId"]
                    o.source_data = json.dumps(ret)
                else:
                    o.last_update_state = "False"
                o.last_update = make_aware(datetime.datetime.now())
                o.save()
                append_log(
                    log,
                    "dashboard_monitor::digest_database_data::Push SGT create",
                    o.tag.tag_number, o.tag.name, o.tag.description, ret)
            except Exception as e:  # pragma: no cover
                append_log(
                    log,
                    "dashboard_monitor::digest_database_data::SGT Create Exception",
                    e, traceback.format_exc())
                o.update_failed = True
                o.last_update = make_aware(datetime.datetime.now())
                o.last_update_state = "False"
                o.last_update_data = {
                    "tag": str(o),
                    "error": "Exception: " + str(e)
                }
                o.save()

    # --- SGACLs: same delete / update / create pattern as tags, keyed on
    # "aclId" in the response. ---
    acls = ACLData.objects.filter(Q(acl__do_sync=True) & Q(update_failed=False)).\
        exclude(organization=None)
    for o in acls:
        if o.source_id and o.update_dest() == "meraki":
            if o.acl.push_delete:
                try:
                    ret = meraki_delete_sgacl(dashboard, o.organization.orgid,
                                              o.source_id)
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::SGACL delete",
                        ret)
                    o.delete()
                except Exception as e:  # pragma: no cover
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::SGACL Delete Exception",
                        e, traceback.format_exc())
                    o.update_failed = True
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {
                        "acl": str(o),
                        "error": "Exception: " + str(e)
                    }
                    o.save()
            else:
                try:
                    ret = meraki_update_sgacl(dashboard,
                                              o.organization.orgid,
                                              o.source_id,
                                              name=o.acl.name,
                                              description=o.acl.description,
                                              rules=o.lookup_rules(o),
                                              ipVersion=o.lookup_version(o))
                    o.last_update_data = json.dumps(ret)
                    if "aclId" in ret:
                        o.last_update_state = "True"
                        o.source_id = ret["aclId"]
                        o.source_data = json.dumps(ret)
                    else:
                        o.last_update_state = "False"
                    o.last_update = make_aware(datetime.datetime.now())
                    o.save()
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::Push SGACL update",
                        o.source_id, o.acl.name, o.acl.description, ret)
                except Exception as e:  # pragma: no cover
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::SGACL Update Exception",
                        e, traceback.format_exc())
                    o.update_failed = True
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {
                        "acl": str(o),
                        "error": "Exception: " + str(e)
                    }
                    o.save()
        elif o.update_dest() == "meraki":
            try:
                ret = meraki_create_sgacl(dashboard,
                                          o.organization.orgid,
                                          name=o.acl.name,
                                          description=o.acl.description,
                                          rules=list(o.lookup_rules(o)),
                                          ipVersion=o.lookup_version(o))
                o.last_update_data = json.dumps(ret)
                if "aclId" in ret:
                    o.last_update_state = "True"
                    o.source_id = ret["aclId"]
                    o.source_data = json.dumps(ret)
                else:
                    o.last_update_state = "False"
                o.last_update = make_aware(datetime.datetime.now())
                o.save()
                append_log(
                    log,
                    "dashboard_monitor::digest_database_data::Push SGACL create",
                    o.acl.name, o.acl.description, ret)
            except Exception as e:  # pragma: no cover
                append_log(
                    log,
                    "dashboard_monitor::digest_database_data::SGACL Create Exception",
                    e, traceback.format_exc())
                o.update_failed = True
                o.last_update = make_aware(datetime.datetime.now())
                o.last_update_state = "False"
                o.last_update_data = {
                    "acl": str(o),
                    "error": "Exception: " + str(e)
                }
                o.save()

    # --- Policies (bindings): deletes are expressed as an update with
    # aclIds=None / catchAllRule="global"; note this branch, unlike tags and
    # ACLs, does not require o.source_id before deleting. ---
    policies = PolicyData.objects.filter(Q(policy__do_sync=True) & Q(update_failed=False)).\
        exclude(organization=None)
    for o in policies:
        if o.policy.push_delete and o.update_dest() == "meraki":
            try:
                srcsgt, dstsgt = o.policy.lookup_sgts(o)
                orgs = sa.dashboard.organization.all()
                for org in orgs:
                    ret = meraki_update_sgpolicy(dashboard,
                                                 org.orgid,
                                                 srcGroupId=srcsgt.source_id,
                                                 dstGroupId=dstsgt.source_id,
                                                 aclIds=None,
                                                 catchAllRule="global")
                    append_log(
                        log,
                        "dashboard_monitor::digest_database_data::Policy delete",
                        ret)
                    # NOTE(review): o.delete() runs once per org inside this
                    # loop; repeated deletes of the same row look unintended —
                    # confirm it shouldn't sit after the loop.
                    o.delete()
            except Exception as e:  # pragma: no cover
                append_log(
                    log,
                    "dashboard_monitor::digest_database_data::Policy Delete Exception",
                    e, traceback.format_exc())
                o.update_failed = True
                o.last_update = make_aware(datetime.datetime.now())
                o.last_update_state = "False"
                o.last_update_data = {
                    "policy": str(o),
                    "error": "Exception: " + str(e)
                }
                o.save()
        elif o.update_dest() == "meraki":
            try:
                srcsgt, dstsgt = o.lookup_sgt_data(o)
                sgacl = o.lookup_sgacl_data(o)
                acls = []
                if sgacl:
                    for s in sgacl:
                        acls.append(s.source_id)

                # Without both SGTs and the ACL data the push cannot be built;
                # record the state and move on (failure flag intentionally not
                # set — see inline comment).
                if not srcsgt or not dstsgt or sgacl is None:
                    o.update_failed = False  # was True; disabled for now
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {
                        "policy":
                        str(o),
                        "error":
                        "Meraki: Unable to locate sgt/acl data;" +
                        str(srcsgt) + ";" + str(dstsgt) + ";" + str(sgacl)
                    }
                    o.save()
                    continue

                ret = meraki_update_sgpolicy(
                    dashboard,
                    o.organization.orgid,
                    name=o.policy.name,
                    description=o.policy.description,
                    srcGroupId=srcsgt.source_id,
                    dstGroupId=dstsgt.source_id,
                    aclIds=acls,
                    catchAllRule=o.lookup_acl_catchall(o),
                    bindingEnabled=True,
                    monitorModeEnabled=False)
                o.last_update_data = json.dumps(ret)
                if "srcGroupId" in ret:
                    o.last_update_state = "True"
                    # Synthetic id: "s<src>-d<dst>" identifies the binding pair.
                    o.source_id = "s" + str(ret["srcGroupId"]) + "-d" + str(
                        ret["dstGroupId"])
                    o.source_data = json.dumps(ret)
                else:
                    o.last_update_state = "False"
                o.last_update = make_aware(datetime.datetime.now())
                o.save()
                append_log(
                    log,
                    "dashboard_monitor::digest_database_data::Push Policy update",
                    o.source_id, o.policy.name, o.policy.description, ret)
            except Exception as e:  # pragma: no cover
                append_log(
                    log,
                    "dashboard_monitor::digest_database_data::Policy Update Exception",
                    e, traceback.format_exc())
                o.update_failed = True
                o.last_update = make_aware(datetime.datetime.now())
                o.last_update_state = "False"
                o.last_update_data = {
                    "policy": str(o),
                    "error": "Exception: " + str(e)
                }
                o.save()
def sync_dashboard():
    """Re-sync Dashboard accounts: push pending DB changes, then re-ingest.

    First pushes any locally pending changes for every SyncSession (forcing a
    rebuild), then — when ISE is not the primary or has already synced —
    ingests fresh data from Dashboard for sessions that need it, and finally
    pushes again in case the ingest surfaced more pending changes.

    :return: (msg, log) where msg is a short status code describing the
        action taken and log is the accumulated log list.
    """
    log = []
    msg = "SYNC_DASHBOARD-NO_ACTION_REQUIRED"
    append_log(
        log,
        "dashboard_monitor::sync_dashboard::Checking Dashboard Accounts for re-sync..."
    )

    # If we know that something needs to be created, do that first.
    sss = SyncSession.objects.all()
    for ss in sss:
        digest_database_data(ss, log)
        ss.dashboard.force_rebuild = True
        ss.save()
        msg = "SYNC_DASHBOARD-CHANGES_MADE_FORCE_UPDATE"

    # Ensure that ISE has already completed a sync if it is the source of truth
    stat = SyncSession.objects.filter(
        Q(ise_source=False) | (Q(iseserver__last_sync__isnull=False)
                               & Q(dashboard__last_sync__isnull=True))
        | (Q(iseserver__last_sync__isnull=False)
           & Q(iseserver__last_sync__gte=F('dashboard__last_sync'))))
    if len(stat) <= 0:
        append_log(
            log,
            "dashboard_monitor::sync_dashboard::Skipping sync as ISE is primary and needs to sync first."
        )
        msg = "SYNC_DASHBOARD-ISE_NEEDS_SYNC"
    else:
        append_log(log, "dashboard_monitor::sync_dashboard::Running sync")

        for s in stat:
            # Cutoff: anything last synced before this is past its interval.
            ctime = make_aware(datetime.datetime.now()) - datetime.timedelta(
                seconds=s.sync_interval)
            # Perform sync if one of the following conditions is met
            # 1) The Sync Session is set to Force Rebuild (this one shouldn't be seen here. but just in case...)
            # 2) The Dashboard Instance is set to Force Rebuild
            # 3) The timestamp of the Dashboard database object isn't the same as the timestamp of it's last sync
            # 4) The timestamp of the Dashboard database object's last sync is beyond the configured manual sync timer
            dbs = SyncSession.objects.filter(
                Q(dashboard__force_rebuild=True) | Q(force_rebuild=True)
                | ~Q(dashboard__last_sync=F('dashboard__last_update'))
                | Q(dashboard__last_sync__lte=ctime))
            for d in dbs:
                # Log the reason(s) for the current sync
                if d.force_rebuild:  # pragma: no cover
                    append_log(
                        log,
                        "dashboard_monitor::sync_dashboard::Sync Session Force Rebuild",
                        d)
                    msg = "SYNC_DASHBOARD-SYNCSESSION_FORCE_REBUILD"
                    d.force_rebuild = False
                    d.save()
                if d.dashboard.force_rebuild:
                    append_log(
                        log,
                        "dashboard_monitor::sync_dashboard::Dashboard Force Rebuild",
                        d)
                    msg = "SYNC_DASHBOARD-DASHBOARD_FORCE_REBUILD"
                    d.dashboard.force_rebuild = False
                    d.dashboard.save()
                if d.dashboard.last_sync != d.dashboard.last_update:
                    append_log(
                        log,
                        "dashboard_monitor::sync_dashboard::Database Config / Sync Timestamp Mismatch",
                        d)
                    msg = "SYNC_DASHBOARD-CONFIG_SYNC_TIMESTAMP_MISMATCH"
                if d.dashboard.last_sync and (d.dashboard.last_sync <= ctime):
                    append_log(
                        log,
                        "dashboard_monitor::sync_dashboard::Past Manual Sync Interval",
                        d)
                    msg = "SYNC_DASHBOARD-PAST_SYNC_INTERVAL"

                # NOTE(review): called with the whole queryset inside the
                # per-session loop, so every matching session is ingested once
                # per iteration — confirm this shouldn't be
                # ingest_dashboard_data([d], log) or sit outside the loop.
                ingest_dashboard_data(dbs, log)

    # After ingesting data, more updates may be required (Should these have been caught elsewhere?)
    sss = SyncSession.objects.all()
    for ss in sss:
        digest_database_data(ss, log)

    append_log(log, "dashboard_monitor::sync_dashboard::Done")
    db_log("dashboard_monitor", log)
    return msg, log
def ingest_dashboard_data(accounts, log):
    """Pull adaptive-policy data from Meraki Dashboard for each sync account.

    For every sync-enabled account: read SGTs, SGACLs and policy bindings
    from each configured organization, merge them into the local database,
    clean up stale entries, then stamp the org and dashboard records with
    the sync timestamp.

    :param accounts: iterable of sync-session objects (each exposing
        sync_enabled, dashboard, ise_source, ...).
    :param log: log accumulator for append_log.
    """
    append_log(log, "dashboard_monitor::ingest_dashboard_data::Accounts -",
               accounts)
    dt = make_aware(datetime.datetime.now())

    for sa in accounts:
        if not sa.sync_enabled:
            # Skip only this account instead of returning: a single disabled
            # session must not silently abort the remaining accounts.
            append_log(
                log,
                "dashboard_monitor::ingest_dashboard_data::sync session not set to allow sync;"
            )
            continue

        a = sa.dashboard
        append_log(log, "dashboard_monitor::ingest_dashboard_data::Resync -",
                   a.description)
        dashboard = meraki.DashboardAPI(base_url=a.baseurl,
                                        api_key=a.apikey,
                                        print_console=False,
                                        output_log=False,
                                        caller=settings.CUSTOM_UA,
                                        suppress_logging=True)
        orgs = a.organization.all()
        if orgs:
            for org in orgs:
                org_id = org.orgid
                append_log(log, "dashboard_monitor::processing orgid::",
                           org_id)
                sgts = meraki_read_sgt(dashboard, org_id)
                sgacls = meraki_read_sgacl(dashboard, org_id)
                sgpolicies = meraki_read_sgpolicy(dashboard, org_id)
                append_log(
                    log, "dashboard_monitor::ingest_dashboard_data::SGTs - ",
                    len(sgts))
                append_log(
                    log, "dashboard_monitor::ingest_dashboard_data::SGACLs - ",
                    len(sgacls))
                append_log(
                    log,
                    "dashboard_monitor::ingest_dashboard_data::Policies - ",
                    len(sgpolicies))

                # Dashboard data is authoritative only when ISE is not the
                # configured source of truth.
                merge_sgts("meraki", sgts, not sa.ise_source, sa, log, org)
                merge_sgacls("meraki", sgacls, not sa.ise_source, sa, log, org)
                merge_sgpolicies("meraki", sgpolicies, not sa.ise_source, sa,
                                 log, org)

                clean_sgts("meraki", sgts, not sa.ise_source, sa, log, org)
                clean_sgacls("meraki", sgacls, not sa.ise_source, sa, log, org)
                clean_sgpolicies("meraki", sgpolicies, not sa.ise_source, sa,
                                 log, org)

                # Cache the raw pull and mark the org as freshly synced.
                org.raw_data = json.dumps({
                    "groups": sgts,
                    "acls": sgacls,
                    "bindings": sgpolicies
                })
                org.force_rebuild = False
                org.last_sync = dt
                org.last_update = dt
                org.skip_sync = True
                org.save()
                sa.dashboard.last_sync = dt
                sa.dashboard.save()
        else:
            append_log(
                log,
                "dashboard_monitor::ingest_dashboard_data::No OrgId present")
def digest_database_data(sa, log):
    """Push pending local TrustSec changes for one sync account out to ISE.

    Processes, in order: policy deletions, SGT delete/update/create, SGACL
    delete/update/create, then policy update/create. Only rows whose
    update_dest() is "ise" are written. Every ERS result or exception is
    recorded back on the row (last_update*, update_failed); rows marked
    update_failed=True are excluded from future runs by the queryset filters.

    :param sa: sync account providing the target ISEServer credentials and
        the apply_changes gate.
    :param log: log list passed through to append_log.
    """
    append_log(log, "ise_monitor::digest_database_data::Account -", sa)
    ise = ERS(ise_node=sa.iseserver.ipaddress, ers_user=sa.iseserver.username, ers_pass=sa.iseserver.password,
              verify=False, disable_warnings=True)

    # Hard gate: this function only writes to ISE, so do nothing unless the
    # account explicitly allows applying changes.
    if not sa.apply_changes:
        append_log(log, "ise_monitor::digest_database_data::sync session not set to apply changes")
        return

    # Pass 1: policy (egress matrix cell) deletions only. Presumably cells
    # must be removed before the SGTs/SGACLs they reference can be deleted
    # below -- TODO confirm ISE ordering requirement.
    policies = PolicyData.objects.filter(Q(policy__do_sync=True) & Q(update_failed=False)).exclude(iseserver=None)
    for o in policies:
        if o.source_id and o.update_dest() == "ise":
            if o.policy.push_delete:
                try:
                    ret = ise.delete_egressmatrixcell(o.source_id)
                    append_log(log, "ise_monitor::digest_database_data::Policy delete", ret)
                    o.delete()
                except Exception as e:  # pragma: no cover
                    # Mark failed so this row is skipped on subsequent runs.
                    # NOTE(review): error payload uses key "tag" although this
                    # is a policy row -- confirm whether "policy" was intended.
                    append_log(log, "ise_monitor::digest_database_data::Policy Delete Exception", e,
                               traceback.format_exc())
                    o.update_failed = True
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {"tag": str(o), "error": "Exception: " + str(e)}
                    o.save()

    # Pass 2: SGTs. A row with a source_id already exists on ISE (delete or
    # update it); a row without one but destined for ISE is created there.
    append_log(log, "ise_monitor::digest_database_data::Tag check")
    from django.forms.models import model_to_dict
    tags = TagData.objects.filter(Q(tag__do_sync=True) & Q(update_failed=False)).exclude(iseserver=None)
    for o in tags:
        append_log(log, "ise_monitor::digest_database_data::Tag", o.tag.name, o.tag.do_sync, o.tag.in_sync(), o.update_dest(), model_to_dict(o))
        if o.source_id and o.update_dest() == "ise":
            if o.tag.push_delete:
                try:
                    ret = ise.delete_sgt(o.source_id)
                    append_log(log, "ise_monitor::digest_database_data::SGT delete", ret)
                    o.delete()
                except Exception as e:  # pragma: no cover
                    append_log(log, "ise_monitor::digest_database_data::SGT Delete Exception", e,
                               traceback.format_exc())
                    o.update_failed = True
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {"tag": str(o), "error": "Exception: " + str(e)}
                    o.save()
            else:
                try:
                    ret = ise.update_sgt(o.source_id, o.tag.cleaned_name(), o.tag.description, o.tag.tag_number,
                                         return_object=True)
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_data = ret
                    o.last_update_state = str(ret.get("success", False))
                    # A dict "response" carries the authoritative object back
                    # from ISE; refresh our copy of its id and raw data.
                    if ret["response"] and isinstance(ret["response"], dict):
                        o.source_id = ret["response"]["id"]
                        o.source_data = json.dumps(ret["response"])
                        append_log(log, "ise_monitor::digest_database_data::Push SGT update", o.source_id,
                                   o.tag.cleaned_name(), o.tag.description, o.tag.tag_number, ret)
                    else:
                        append_log(log, "ise_monitor::digest_database_data::SGT Invalid Return", ret)
                        o.update_failed = True
                        o.last_update_state = "False"
                        o.last_update_data = {"tag": str(o), "error": "SGT Invalid return"}
                    o.save()
                except Exception as e:     # pragma: no cover
                    append_log(log, "ise_monitor::digest_database_data::SGT Update Exception", e,
                               traceback.format_exc())
                    o.update_failed = True
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {"tag": str(o), "error": "Exception: " + str(e)}
                    o.save()
        elif o.update_dest() == "ise":
            # No source_id yet: create the SGT on ISE and capture its new id.
            try:
                ret = ise.add_sgt(o.tag.cleaned_name(), o.tag.description, o.tag.tag_number, return_object=True)
                o.last_update = make_aware(datetime.datetime.now())
                o.last_update_data = ret
                o.last_update_state = str(ret.get("success", False))
                if ret["response"] and isinstance(ret["response"], dict):
                    o.source_id = ret["response"]["id"]
                    o.source_data = json.dumps(ret["response"])
                    append_log(log, "ise_monitor::digest_database_data::Push SGT create", o.tag.cleaned_name(),
                               o.tag.description, o.tag.tag_number, ret)
                else:     # pragma: no cover
                    append_log(log, "ise_monitor::digest_database_data::SGT Invalid Return", ret)
                    o.update_failed = True
                    o.last_update_state = "False"
                    o.last_update_data = {"tag": str(o), "error": "SGT Invalid Return"}
                o.save()
            except Exception as e:     # pragma: no cover
                append_log(log, "ise_monitor::digest_database_data::SGT Create Exception", e, traceback.format_exc())
                o.update_failed = True
                o.last_update = make_aware(datetime.datetime.now())
                o.last_update_state = "False"
                o.last_update_data = {"tag": str(o), "error": "Exception: " + str(e)}
                o.save()

    # Pass 3: SGACLs -- same delete / update / create pattern as SGTs.
    acls = ACLData.objects.filter(Q(acl__do_sync=True) & Q(update_failed=False)).exclude(iseserver=None)
    for o in acls:
        if o.source_id and o.update_dest() == "ise":
            if o.acl.push_delete:
                try:
                    ret = ise.delete_sgacl(o.source_id)
                    append_log(log, "ise_monitor::digest_database_data::SGACL delete", ret)
                    o.delete()
                except Exception as e:  # pragma: no cover
                    append_log(log, "ise_monitor::digest_database_data::SGACL Delete Exception", e,
                               traceback.format_exc())
                    o.update_failed = True
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {"acl": str(o), "error": "Exception: " + str(e)}
                    o.save()
            else:
                try:
                    # Rules are stored newline-separated; ERS expects a list.
                    ret = ise.update_sgacl(o.source_id, o.acl.cleaned_name(), o.acl.description, o.lookup_version(o),
                                           o.lookup_rules(o).split("\n"), return_object=True)
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_data = ret
                    o.last_update_state = str(ret.get("success", False))
                    if ret["response"] and isinstance(ret["response"], dict):
                        o.source_id = ret["response"]["id"]
                        o.source_data = json.dumps(ret["response"])
                        append_log(log, "ise_monitor::digest_database_data::Push SGACL update", o.source_id,
                                   o.acl.cleaned_name(), o.acl.description, ret)
                    else:     # pragma: no cover
                        append_log(log, "ise_monitor::digest_database_data::SGACL Invalid Return", ret)
                        o.update_failed = True
                        o.last_update_state = "False"
                        o.last_update_data = {"acl": str(o), "error": "SGACL Invalid Return"}
                    o.save()
                except Exception as e:     # pragma: no cover
                    append_log(log, "ise_monitor::digest_database_data::SGACL Update Exception", e,
                               traceback.format_exc())
                    o.update_failed = True
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {"acl": str(o), "error": "Exception: " + str(e)}
                    o.save()
        elif o.update_dest() == "ise":
            try:
                ret = ise.add_sgacl(o.acl.cleaned_name(), o.acl.description, o.lookup_version(o),
                                    o.lookup_rules(o).split("\n"), return_object=True)
                o.last_update = make_aware(datetime.datetime.now())
                o.last_update_data = ret
                o.last_update_state = str(ret.get("success", False))
                if ret["response"] and isinstance(ret["response"], dict):
                    o.source_id = ret["response"]["id"]
                    o.source_data = json.dumps(ret["response"])
                    append_log(log, "ise_monitor::digest_database_data::Push SGACL create", o.acl.cleaned_name(),
                               o.acl.description, ret)
                else:     # pragma: no cover
                    append_log(log, "ise_monitor::digest_database_data::SGACL Null Return", ret)
                    o.update_failed = True
                    o.last_update_state = "False"
                    o.last_update_data = {"acl": str(o), "error": "SGACL Null Return"}
                o.save()
            except Exception as e:     # pragma: no cover
                append_log(log, "ise_monitor::digest_database_data::SGACL Create Exception", e, traceback.format_exc())
                o.update_failed = True
                o.last_update = make_aware(datetime.datetime.now())
                o.last_update_state = "False"
                o.last_update_data = {"acl": str(o), "error": "Exception: " + str(e)}
                o.save()

    # Pass 4: policy updates/creates -- runs AFTER tags and acls so that the
    # referenced SGT/SGACL rows already carry their ISE source_ids.
    policies = PolicyData.objects.filter(Q(policy__do_sync=True) & Q(update_failed=False)).exclude(iseserver=None)
    for o in policies:
        if o.source_id and not o.policy.push_delete and o.update_dest() == "ise":
            try:
                srcsgt, dstsgt = o.lookup_sgt_data(o)
                sgacl = o.lookup_sgacl_data(o)
                acls = []
                if sgacl:
                    for s in sgacl:
                        acls.append(s.source_id)

                # Cannot build the matrix cell without both SGTs and the acl
                # lookup succeeding; record the miss and move on.
                if not srcsgt or not dstsgt or sgacl is None:
                    o.update_failed = False     # was True; disabled for now
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {"policy": str(o), "error": "ISE Update: Unable to locate sgt/sgacl data;" +
                                                                     str(srcsgt) + ";" + str(dstsgt) + ";" + str(sgacl)}
                    o.save()
                    continue

                ret = ise.update_egressmatrixcell(o.source_id, srcsgt.source_id, dstsgt.source_id,
                                                  o.lookup_acl_catchall(o),
                                                  acls=acls, description=o.lookup_description(o),
                                                  return_object=True)
                o.last_update = make_aware(datetime.datetime.now())
                o.last_update_data = ret
                o.last_update_state = str(ret.get("success", False))
                if ret["response"] and isinstance(ret["response"], dict):
                    o.source_id = ret["response"]["id"]
                    o.source_data = json.dumps(ret["response"])
                    append_log(log, "ise_monitor::digest_database_data::Push Policy update", o.source_id, o.policy.name,
                               o.lookup_description(o), ret)
                else:     # pragma: no cover
                    append_log(log, "ise_monitor::digest_database_data::Policy Null Return", ret)
                    o.update_failed = True
                    o.last_update_state = "False"
                    o.last_update_data = {"policy": str(o), "error": "Policy Null Return"}
                o.save()
            except Exception as e:     # pragma: no cover
                append_log(log, "ise_monitor::digest_database_data::Policy Update Exception", e,
                           traceback.format_exc())
                o.update_failed = True
                o.last_update = make_aware(datetime.datetime.now())
                o.last_update_state = "False"
                o.last_update_data = {"policy": str(o), "error": "Exception: " + str(e)}
                o.save()
        elif o.update_dest() == "ise":
            try:
                srcsgt, dstsgt = o.lookup_sgt_data(o)
                sgacl = o.lookup_sgacl_data(o)
                acls = []
                if sgacl:
                    for s in sgacl:
                        acls.append(s.source_id)

                # Unlike the update branch above, a lookup miss on create DOES
                # mark the row failed.
                if not srcsgt or not dstsgt or sgacl is None:
                    o.update_failed = True
                    o.last_update = make_aware(datetime.datetime.now())
                    o.last_update_state = "False"
                    o.last_update_data = {"policy": str(o), "error": "ISE Create: Unable to locate sgt/sgacl data;" +
                                                                     str(srcsgt) + ";" + str(dstsgt) + ";" + str(sgacl)}
                    o.save()
                    continue

                ret = ise.add_egressmatrixcell(srcsgt.source_id, dstsgt.source_id, o.lookup_acl_catchall(o),
                                               acls=acls, description=o.lookup_description(o),
                                               return_object=True)
                o.last_update = make_aware(datetime.datetime.now())
                o.last_update_data = ret
                o.last_update_state = str(ret.get("success", False))
                if ret["response"] and isinstance(ret["response"], dict):
                    o.source_id = ret["response"]["id"]
                    o.source_data = json.dumps(ret["response"])
                    append_log(log, "ise_monitor::digest_database_data::Push Policy create", o.policy.name,
                               o.lookup_description(o), ret)
                else:     # pragma: no cover
                    append_log(log, "ise_monitor::digest_database_data::Policy Null Return", ret)
                    o.update_failed = True
                    o.last_update_state = "False"
                    o.last_update_data = {"policy": str(o), "error": "Policy Null Return"}
                o.save()
            except Exception as e:     # pragma: no cover
                append_log(log, "ise_monitor::digest_database_data::Policy Create Exception", e, traceback.format_exc())
                o.update_failed = True
                o.last_update = make_aware(datetime.datetime.now())
                o.last_update_state = "False"
                o.last_update_data = {"policy": str(o), "error": "Exception: " + str(e)}
                o.save()
def ingest_ise_data(accounts, log, server_only=False):
    """Pull SGTs, SGACLs and egress policies from ISE and merge them locally.

    :param accounts: iterable of sync accounts (server_only=False) or of bare
        ISE server objects (server_only=True).
    :param log: log list passed through to append_log.
    :param server_only: when True, each item IS the server; no sync session
        (sa=None) and the server is never treated as the base source.
    """
    append_log(log, "ise_monitor::ingest_server_data::Accounts -", accounts)
    # One timestamp shared by every record touched in this run.
    dt = make_aware(datetime.datetime.now())

    for sync_account in accounts:
        if not server_only:
            if not sync_account.sync_enabled:
                append_log(log, "ise_monitor::ingest_server_data::sync session not set to allow sync;")
                # NOTE(review): this 'return' aborts ALL remaining accounts,
                # not just the disabled one -- confirm 'continue' wasn't intended.
                return

            # NOTE(review): dead assignment -- 'ise' is unconditionally
            # rebound below before any use.
            ise = None
            a = sync_account.iseserver
            src = sync_account.ise_source
            sa = sync_account
        else:
            a = sync_account
            src = False
            sa = None

        append_log(log, "ise_monitor::ingest_server_data::Resync -", a.description)
        ise = ERS(ise_node=a.ipaddress, ers_user=a.username, ers_pass=a.password, verify=False, disable_warnings=True)
        sgts = ise.get_sgts(detail=True)
        sgacls = ise.get_sgacls(detail=True)
        sgpolicies = ise.get_egressmatrixcells(detail=True)
        append_log(log, "ise_monitor::ingest_server_data::SGTs - ", len(sgts))
        append_log(log, "ise_monitor::ingest_server_data::SGACLs - ", len(sgacls))
        append_log(log, "ise_monitor::ingest_server_data::Policies - ", len(sgpolicies))
        # 'ise' is intentionally rebound from the ERS client to a plain dict
        # of the fetched payloads so it can be serialized into raw_data below.
        ise = {"sgts": sgts, "sgacls": sgacls, "sgpolicies": sgpolicies}

        # Merge new/changed objects, then clean out entries that no longer
        # exist on the server.
        merge_sgts("ise", sgts["response"], src, sa, log, a)
        merge_sgacls("ise", sgacls["response"], src, sa, log, a)
        merge_sgpolicies("ise", sgpolicies["response"], src, sa, log, a)

        clean_sgts("ise", sgts["response"], src, sa, log, a)
        clean_sgacls("ise", sgacls["response"], src, sa, log, a)
        clean_sgpolicies("ise", sgpolicies["response"], src, sa, log, a)

        # Record the raw snapshot and mark the server as freshly synced; the
        # skip_* flags prevent this write itself from triggering another sync.
        a.raw_data = json.dumps(ise)
        a.force_rebuild = False
        a.last_sync = dt
        a.last_update = dt
        a.skip_sync = True
        a.skip_update = True
        a.save()
# Exemplo n.º 11
# 0
def clean_sgts(src, sgts, is_base, sync_session, log=None, obj=None):
    """Remove or detach TagData rows whose SGT no longer exists at the source.

    :param src: "ise" or "meraki" -- which system *sgts* was read from.
    :param sgts: list of raw SGT dicts currently present on that system.
    :param is_base: True when *src* is the authoritative source; a stale tag
        is then flagged push_delete (propagating the deletion) instead of
        merely being detached from this source.
    :param sync_session: unused here; kept for signature parity with the
        sibling clean_*/merge_* helpers.
    :param log: optional log list passed to append_log.
    :param obj: the ISEServer or Organization the TagData rows belong to.
    :return: list of TagData rows that were detached (modified in place).
    """
    append_log(log, "db_trustsec::clean_sgts::", len(sgts))
    changed_objs = []
    try:
        # The upstream id key differs between the two sources.
        if src == "ise":
            active_id_list = [s["id"] for s in sgts]
            tags = TagData.objects.filter(iseserver=obj)
        else:
            active_id_list = [s["groupId"] for s in sgts]
            tags = TagData.objects.filter(organization=obj)

        for i in tags:
            # The original duplicated this branch once per source with only
            # the log text differing; folded into one branch keyed on src
            # (log output is unchanged via string concatenation).
            if src in ("ise", "meraki") and i.source_id and i.source_id not in active_id_list:
                if is_base:
                    # Authoritative source dropped the tag: propagate deletion.
                    append_log(log, "db_trustsec::clean_sgts::setting " + src, i.source_id, "for delete")
                    i.tag.push_delete = True
                    i.tag.save()
                    i.delete()
                else:
                    # Non-authoritative source: just forget this source's copy.
                    append_log(log, "db_trustsec::clean_sgts::removing " + src, i.source_id, "from db")
                    i.source_id = None
                    i.source_data = None
                    i.last_update = make_aware(datetime.datetime.now())
                    i.save()
                    changed_objs.append(i)

        # Garbage-collect Tag rows left with no per-source data at all.
        dbobjs = Tag.objects.all()
        for dbo in dbobjs:
            if len(dbo.tagdata_set.all()) == 0:
                dbo.delete()

        return changed_objs
    except Exception as e:    # pragma: no cover
        append_log(log, "db_trustsec::clean_sgts::Exception in clean_sgts: ", e)
    return changed_objs
# Exemplo n.º 12
# 0
def merge_sgpolicies(src, sgpolicies, is_base, sync_session, log=None, obj=None):
    """Merge policies (egress matrix cells / Meraki bindings) into the DB.

    For each raw policy, resolves its source/destination SGTs to local
    TagData, then creates or updates a Policy row keyed on the
    "srcnum-dstnum" mapping string and refreshes the matching PolicyData row
    for *obj*, creating placeholder PolicyData rows for every other target.

    :param src: "meraki" or "ise" -- which system *sgpolicies* came from.
    :param sgpolicies: list of raw policy dicts from that system.
    :param is_base: True when *src* is authoritative; existing Policy rows
        are then fully updated, otherwise only per-source data is recorded.
    :param sync_session: sync session used to resolve TagData and the set of
        Meraki organizations in scope.
    :param log: optional log list passed to append_log.
    :param obj: the Organization (meraki) or ISEServer (ise) being merged.
    :return: list of Policy objects created or fully updated.
    """
    changed_objs = []
    try:
        iseservers = ISEServer.objects.all()
        organizations = Organization.objects.filter(dashboard__syncsession=sync_session)
        for s in sgpolicies:
            src_grp = dst_grp = binding_id = binding_name = binding_desc = policy_name = policy_desc = None
            # Resolve the policy's endpoints to local TagData; the id field
            # names differ per source.
            if src == "meraki":
                p_src = TagData.objects.filter(source_id=s["srcGroupId"]).\
                    filter(organization__dashboard=sync_session.dashboard)
                p_dst = TagData.objects.filter(source_id=s["dstGroupId"]).\
                    filter(organization__dashboard=sync_session.dashboard)
                src_grp = p_src[0] if len(p_src) > 0 else None
                dst_grp = p_dst[0] if len(p_dst) > 0 else None
            elif src == "ise":
                p_src = TagData.objects.filter(source_id=s["sourceSgtId"]).\
                    filter(iseserver=sync_session.iseserver)
                p_dst = TagData.objects.filter(source_id=s["destinationSgtId"]).\
                    filter(iseserver=sync_session.iseserver)
                src_grp = p_src[0] if len(p_src) > 0 else None
                dst_grp = p_dst[0] if len(p_dst) > 0 else None
                if src_grp and dst_grp:
                    # Skip the 65535-65535 cell -- presumably ISE's reserved
                    # default ANY/ANY entry; TODO confirm.
                    if src_grp.tag.tag_number == 65535 and dst_grp.tag.tag_number == 65535:
                        continue

            if src_grp and dst_grp:
                # Canonical identifiers derived from the resolved endpoints;
                # binding_name ("srcnum-dstnum") is the Policy lookup key.
                binding_name = str(src_grp.tag.tag_number) + "-" + str(dst_grp.tag.tag_number)
                binding_id = "s" + str(src_grp.source_id) + "-d" + str(dst_grp.source_id)
                binding_desc = str(src_grp.tag.name) + "-" + str(dst_grp.tag.name)
                policy_name = s.get("name", "")
                policy_desc = s.get("description", "")

                # Fall back to the derived names when the source has none.
                policy_name = binding_name if (policy_name is None or policy_name == "") else policy_name
                policy_desc = binding_desc if (policy_desc is None or policy_desc == "") else policy_desc

                # Look up policy, and see if the source matches the current input. If so, check for updates...
                full_update = False
                if src == "meraki":
                    pol, created = Policy.objects.get_or_create(mapping=binding_name,
                                                                defaults={"name": policy_name,
                                                                          "description": policy_desc,
                                                                          "origin_org": obj,
                                                                          "syncsession": sync_session})
                else:
                    pol, created = Policy.objects.get_or_create(mapping=binding_name,
                                                                defaults={"name": policy_name,
                                                                          "description": policy_desc,
                                                                          "origin_ise": obj,
                                                                          "syncsession": sync_session})
                if created:
                    changed_objs.append(pol)
                    append_log(log, "db_trustsec::merge_policies::creating policy", policy_name, "...")
                    full_update = True
                else:
                    if is_base:
                        append_log(log, "db_trustsec::merge_policies::policy::" + src + "::", policy_name,
                                   "exists in database; updating...")
                        full_update = True
                    else:
                        append_log(log, "db_trustsec::merge_policies::policy::" + src + "::", policy_name,
                                   "exists in database; not base, only add data...")

                if full_update:
                    # Newly created or authoritative source: sync all fields
                    # and rebuild the ACL set from the source's acl id list.
                    pol.mapping = binding_name
                    if pol.name != policy_name and pol.cleaned_name() != policy_name:
                        pol.name = policy_name
                    pol.description = policy_desc
                    pol.source_group = src_grp.tag
                    pol.dest_group = dst_grp.tag
                    acl_set = []
                    if src == "meraki":
                        acls = ACLData.objects.filter(source_id__in=s["aclIds"])
                        for a in acls:
                            # if a.acl not in pol.acl.all():
                            acl_set.append(a.acl)
                        pol.acl.set(acl_set)
                    else:
                        acls = ACLData.objects.filter(source_id__in=s["sgacls"])
                        for a in acls:
                            acl_set.append(a.acl)
                        pol.acl.set(acl_set)
                    changed_objs.append(pol)
                    pol.save()

                # Refresh this source's PolicyData and make sure placeholder
                # PolicyData rows exist for every other sync target.
                if not pol.push_delete:
                    if src == "meraki":
                        PolicyData.objects.update_or_create(policy=pol, organization=obj,
                                                            defaults={"source_id": binding_id,
                                                                      "source_data": json.dumps(s),
                                                                      "source_ver": s["versionNum"],
                                                                      "last_sync": make_aware(datetime.datetime.now())})
                        # Ensure PolicyData objects exist for ISE
                        for i in iseservers:
                            PolicyData.objects.get_or_create(policy=pol, iseserver=i)
                        # Ensure PolicyData objects exist for all Meraki Orgs
                        for o in organizations:
                            PolicyData.objects.get_or_create(policy=pol, organization=o)
                    elif src == "ise":
                        PolicyData.objects.update_or_create(policy=pol, iseserver=obj,
                                                            defaults={"source_id": s["id"],
                                                                      "source_data": json.dumps(s),
                                                                      "source_ver": s.get("generationId", None),
                                                                      "last_sync": make_aware(datetime.datetime.now())})
                        # Ensure PolicyData objects exist for all Meraki Orgs
                        for o in organizations:
                            PolicyData.objects.get_or_create(policy=pol, organization=o)
            elif s.get("name", "") == "ANY-ANY":
                # The catch-all policy is expected to have no resolvable
                # endpoints; silently skip it.
                pass
            else:
                append_log(log, "db_trustsec::merge_sgpolicies::missing src or dst", s, src_grp, dst_grp)
        return changed_objs
    except Exception as e:    # pragma: no cover
        append_log(log, "db_trustsec::merge_sgpolicies::Exception in merge_sgpolicies: ", e, traceback.format_exc())
    return changed_objs
# Exemplo n.º 13
# 0
def merge_sgacls(src, sgacls, is_base, sync_session, log=None, obj=None):
    """Merge SGACLs read from Meraki or ISE into the local database.

    For each raw SGACL, finds or creates the local ACL row (matching by
    source id first, then by name with space/underscore variants), then
    refreshes the per-source ACLData row for *obj* and ensures placeholder
    ACLData rows exist for every other sync target.

    :param src: "meraki" or "ise" -- which system *sgacls* came from.
    :param sgacls: list of raw SGACL dicts from that system.
    :param is_base: True when *src* is authoritative; existing ACL rows are
        then updated, otherwise only per-source data is recorded.
    :param sync_session: sync session used to scope the Meraki organizations.
    :param log: optional log list passed to append_log.
    :param obj: the Organization (meraki) or ISEServer (ise) being merged.
    :return: list of ACL objects created or updated.
    """
    changed_objs = []
    try:
        iseservers = ISEServer.objects.all()
        organizations = Organization.objects.filter(dashboard__syncsession=sync_session)
        for s in sgacls:
            tag_name = s.get("name", "")
            if tag_name == "":
                append_log(log, "db_trustsec::merge_sgacls::sgacl doesn't have name; skipping", s)
                continue
            # Meraki uses spaces where ISE uses underscores; match both forms.
            tn_ise = tag_name.replace(" ", "_")
            tn_mer = tag_name.replace("_", " ")
            # The source's id key differs per system.
            tid = s["id"] if "id" in s else s["aclId"]

            if tag_name:
                # Look up acl, and see if the source matches the current input. If so, check for updates...
                aclds = ACLData.objects.filter(source_id=tid)
                if len(aclds) > 0:
                    acls = [aclds[0].acl]
                else:
                    acls = ACL.objects.filter(Q(name=tn_mer) | Q(name=tn_ise))

                if len(acls) <= 0:
                    created = True

                    # Built-in default ACLs are hidden from the UI.
                    if tag_name in ("Deny_IP_Log", "Permit IP", "Permit_IP_Log", "Deny IP"):
                        isvisible = False
                    else:
                        isvisible = True

                    if src == "meraki":
                        acl = ACL.objects.create(name=tag_name, description=s["description"], origin_org=obj,
                                                 syncsession=sync_session, visible=isvisible)
                    else:
                        acl = ACL.objects.create(name=tag_name, description=s["description"], origin_ise=obj,
                                                 syncsession=sync_session, visible=isvisible)
                else:
                    created = False
                    acl = acls[0]

                if created:
                    append_log(log, "db_trustsec::merge_sgacls::creating acl", tag_name, "...")
                    changed_objs.append(acl)
                else:
                    if is_base:
                        append_log(log, "db_trustsec::merge_sgacls::sgacl::" + src + "::", tag_name,
                                   "exists in database; updating...")
                        if acl.name != s["name"] and acl.cleaned_name() != s["name"]:
                            acl.name = s["name"]
                        # Quotes are stripped -- presumably to keep the value
                        # safe for downstream serialization; TODO confirm.
                        acl.description = s["description"].replace("'", "").replace('"', "")
                        changed_objs.append(acl)
                        acl.save()
                    else:
                        append_log(log, "db_trustsec::merge_sgacls::sgacl::" + src + "::", tag_name,
                                   "exists in database; not base, only add data...")

                # Refresh this source's ACLData and ensure placeholder rows
                # exist for every other sync target.
                if not acl.push_delete:
                    if src == "meraki":
                        ACLData.objects.update_or_create(acl=acl, organization=obj,
                                                         defaults={"source_id": s["aclId"],
                                                                   "source_data": json.dumps(s),
                                                                   "source_ver": s["versionNum"],
                                                                   "last_sync": make_aware(datetime.datetime.now())})
                        # Ensure ACLData objects exist for ISE
                        for i in iseservers:
                            ACLData.objects.get_or_create(acl=acl, iseserver=i)
                        # Ensure ACLData objects exist for all Meraki Orgs
                        for o in organizations:
                            ACLData.objects.get_or_create(acl=acl, organization=o)
                    elif src == "ise":
                        ACLData.objects.update_or_create(acl=acl, iseserver=obj,
                                                         defaults={"source_id": s["id"],
                                                                   # .get() mirrors merge_sgpolicies: a missing
                                                                   # generationId must not raise KeyError and
                                                                   # abort the whole merge via the outer except.
                                                                   "source_ver": s.get("generationId", None),
                                                                   "last_sync": make_aware(datetime.datetime.now())})
                        # Ensure ACLData objects exist for all Meraki Orgs
                        for o in organizations:
                            ACLData.objects.get_or_create(acl=acl, organization=o)
        return changed_objs
    except Exception as e:    # pragma: no cover
        append_log(log, "db_trustsec::merge_sgacls::Exception in merge_sgacls: ", e, traceback.format_exc())
        return changed_objs
# Exemplo n.º 14
# 0
def merge_sgts(src, sgts, is_base, sync_session, log=None, obj=None):
    """Merge a list of Security Group Tags (SGTs) from a source into the DB.

    :param src: origin of the data; "meraki" or "ise"
    :param sgts: list of raw SGT dicts as returned by the source API
    :param is_base: True when this source is authoritative and may update the
        stored tag name/description
    :param sync_session: SyncSession used to scope the Meraki Organizations
    :param log: optional list that append_log() accumulates messages into
    :param obj: the Organization (src == "meraki") or ISEServer (src == "ise")
        the data came from
    :return: list of Tag objects that were created or updated
    """
    changed_objs = []
    try:
        iseservers = ISEServer.objects.all()
        organizations = Organization.objects.filter(dashboard__syncsession=sync_session)
        for s in sgts:
            # A non-dict entry carries no usable tag number or source id; the
            # previous code fell through and crashed subscripting it. Skip it.
            if not isinstance(s, dict):
                append_log(log, "db_trustsec::merge_sgts::skipping non-dict entry", s)
                continue

            # Meraki calls the tag number "value"; ISE calls it "tag".
            if "value" in s:
                tag_num = s["value"]
            else:
                tag_num = s.get("tag")
            # ISE records use "id"; Meraki records use "groupId".
            tid = s["id"] if "id" in s else s["groupId"]
            append_log(log, "db_trustsec::merge_sgts::evaluating", tag_num, "(", tid, ")...")

            if tag_num is None:
                continue

            # Look the tag up via its per-source TagData record, if any.
            tagd = TagData.objects.filter(source_id=tid).first()
            tag = tagd.tag if tagd else None
            created = False

            if tag:
                if is_base:
                    append_log(log, "db_trustsec::merge_sgts::sgt::" + src + "::", tag_num,
                               "exists in database; updating...")
                    tag.tag_number = tag_num
                    # Only replace the name when neither the raw nor the
                    # cleaned stored form already matches the incoming one.
                    if tag.name != s["name"] and tag.cleaned_name() != s["name"]:
                        tag.name = s["name"]
                    # Quote characters are stripped from descriptions here,
                    # consistent with how the value is stored elsewhere.
                    tag.description = s["description"].replace("'", "").replace('"', "")
                    tag.save()
                    changed_objs.append(tag)
                else:
                    append_log(log, "db_trustsec::merge_sgts::sgt::" + src + "::", tag_num,
                               "exists in database; not base, only add data...")
            else:
                # New tag: record which side it originated from.
                origin_field = "origin_org" if src == "meraki" else "origin_ise"
                tag, created = Tag.objects.get_or_create(
                    tag_number=tag_num,
                    defaults={"name": s["name"],
                              "description": s["description"],
                              origin_field: obj,
                              "syncsession": sync_session})
            if created:
                changed_objs.append(tag)
                append_log(log, "db_trustsec::merge_sgts::creating tag", tag_num, "...")

            # Persist the raw source payload and make sure a TagData row
            # exists for every peer (ISE servers / Meraki orgs) in the sync.
            if not tag.push_delete:
                append_log(log, "db_trustsec::merge_sgts::sgt::" + src + "::", tag_num,
                           "writing raw data to database...")
                now = make_aware(datetime.datetime.now())
                if src == "meraki":
                    TagData.objects.update_or_create(
                        tag=tag, organization=obj,
                        defaults={"source_id": s["groupId"],
                                  "source_data": json.dumps(s),
                                  "source_ver": s["versionNum"],
                                  "last_sync": now})
                    # Ensure TagData objects exist for ISE
                    for i in iseservers:
                        TagData.objects.get_or_create(tag=tag, iseserver=i)
                    # Ensure TagData objects exist for all Meraki Orgs
                    for o in organizations:
                        TagData.objects.get_or_create(tag=tag, organization=o)
                elif src == "ise":
                    defaults = {"source_id": s["id"],
                                "source_data": json.dumps(s),
                                "last_sync": now}
                    # Only some ISE payloads carry a generationId version;
                    # omit source_ver when it is absent (was two duplicated
                    # update_or_create calls before).
                    if s.get("generationId"):
                        defaults["source_ver"] = s["generationId"]
                    TagData.objects.update_or_create(tag=tag, iseserver=obj,
                                                     defaults=defaults)
                    # Ensure TagData objects exist for all Meraki Orgs
                    for o in organizations:
                        TagData.objects.get_or_create(tag=tag, organization=o)

        return changed_objs
    except Exception as e:    # pragma: no cover
        append_log(log, "db_trustsec::merge_sgts::Exception in merge_sgts: ", e, traceback.format_exc())
    return changed_objs
def run():
    """Register this app's webhook receiver on every network of the configured
    Meraki organizations and enable the 'settingsChanged' alert for it.

    Uses the first Dashboard with webhook_enable=True; optionally exposes the
    local server through an ngrok tunnel when webhook_ngrok is set.

    :raises Exception: when no Dashboard has webhooks enabled.
    """
    log = []
    dbs = Dashboard.objects.filter(webhook_enable=True)
    if not dbs:
        append_log(log, "Dashboard webhooks are not configured")
        db_log("dashboard_webhook", log)
        raise Exception("Dashboard webhooks are not configured")

    db = dbs[0]
    dashboard = meraki.DashboardAPI(api_key=db.apikey,
                                    print_console=False,
                                    output_log=False)
    if db.webhook_ngrok:
        # Tunnel the local server; fall back to port 8000 when the port from
        # the command line cannot be used, and bail out if ngrok won't start.
        try:
            public_url = ngrok.connect(sys.argv[-1], "http")
        except Exception:
            try:
                public_url = ngrok.connect(8000, "http")
            except Exception:
                print("# Unable to launch ngrok")
                return None

        # Meraki requires an https receiver URL.
        db.webhook_url = public_url.replace("http://",
                                            "https://") + "/webhook/"
        db.save()

    for org in db.organization.all():
        for n in dashboard.networks.getOrganizationNetworks(org.orgid):
            net_id = n["id"]
            whid = None
            whurl = dashboard.networks.getNetworkHttpServers(networkId=net_id)
            for w in whurl:
                if w["name"] == "adaptive-policy-sync":
                    append_log(log, "updating for network", net_id)
                    dashboard.networks.updateNetworkHttpServer(
                        networkId=net_id,
                        id=w["id"],
                        url=db.webhook_url)
                    whid = w["id"]
            if whid is None:
                # Create our receiver when it is missing. (Previously this
                # only happened when the network had *no* HTTP servers at
                # all, so networks with unrelated servers were skipped.)
                append_log(log, "creating new webhook for network", net_id)
                wh = dashboard.networks.createNetworkHttpServer(
                    networkId=net_id,
                    name="adaptive-policy-sync",
                    url=db.webhook_url)
                whid = wh["id"]

            if whid:
                al = dashboard.networks.getNetworkAlertSettings(
                    networkId=net_id)
                for a in al["alerts"]:
                    if a["type"] == "settingsChanged":
                        dests = a["alertDestinations"]["httpServerIds"]
                        # Guard against stacking duplicate ids on re-runs.
                        if whid not in dests:
                            dests.append(whid)
                        a["enabled"] = True

                append_log(log, "updating alert settings", al)
                r = dashboard.networks.updateNetworkAlertSettings(
                    networkId=net_id,
                    defaultDestinations=al["defaultDestinations"],
                    alerts=al["alerts"])
                append_log(log, "update response", r)

    db_log("dashboard_webhook", log)