def run():
    mx_host = CONFIG["mx"]["endpoint"]
    session_id = ss.login(mx_host, CONFIG["mx"]["username"],
                          CONFIG["mx"]["password"])
    sites_response = ss.makeCall(mx_host, session_id, "/conf/sites")
    sites = sites_response.json()
    for site in sites["sites"]:
        server_groups_response = ss.makeCall(mx_host, session_id,
                                             "/conf/serverGroups/" + site)
        server_groups = server_groups_response.json()
        for server_group in server_groups["server-groups"]:
            web_services_response = ss.makeCall(
                mx_host, session_id,
                "/conf/webServices/" + site + "/" + server_group)
            web_services = web_services_response.json()
            for web_service in web_services["web-services"]:
                applications_response = ss.makeCall(
                    mx_host, session_id, "/conf/webApplications/" + site +
                    "/" + server_group + "/" + web_service)
                applications = applications_response.json()
                for application in applications["webApplications"]:
                    profile_response = ss.makeCall(
                        mx_host, session_id, "/conf/webProfile/" + site + "/" +
                        server_group + "/" + web_service + "/" + application)
                    profile = profile_response.json()
                    for learned_host in profile["learnedHosts"]:
                        row = [site, server_group, web_service, application,
                               learned_host]
                        CSV_DATA.append('"' + '","'.join(row) + '"')
    csv_file.write("\n".join(CSV_DATA))
    csv_file.close()
def run():
    mx_host = CONFIG["mx"]["endpoint"]
    session_id = ss.login(mx_host, CONFIG["mx"]["username"],
                          CONFIG["mx"]["password"])

    # If TBL_GROUPS is empty, retrieve list of all table groups from MX
    if not TBL_GROUPS:
        tbl_grps_response = ss.makeCall(mx_host, session_id,
                                        "/conf/tableGroups/")
        tbl_grps = tbl_grps_response.json()
        logging.warning(
            "\n\nNo table groups specified, loading all table groups from MX\n"
            + json.dumps(tbl_grps))

        for tbl_grp in tbl_grps:
            TBL_GROUPS.append(tbl_grp["displayName"])

    # Iterate through list of table group names and append to .csv
    for tbl_grp_name in TBL_GROUPS:
        if "/" not in tbl_grp_name:
            tbl_grp_name_ary = tbl_grp_name.split(' - ')
            print("retrieving table group: " + tbl_grp_name)
            data_type = tbl_grp_name_ary[-1]
            tbl_grp_response = ss.makeCall(
                mx_host, session_id,
                "/conf/tableGroups/" + tbl_grp_name + "/data")
            tbl_grp = tbl_grp_response.json()
            for record in tbl_grp["records"]:
                if "Columns" in record:
                    for column_name in record["Columns"]:
                        row = [tbl_grp_name]
                        row.append(data_type)
                        row.append((record["Name"] or 'n/a'))
                        row.append((record["Type"] or 'n/a'))
                        row.append(column_name)
                        CSV_DATA.append('"' + '","'.join(row) + '"')
                else:
                    row = [tbl_grp_name]
                    row.append(data_type)
                    row.append((record["Type"] or 'n/a'))
                    row.append((record["Name"] or 'n/a'))
                    row.append("n/a")
                    CSV_DATA.append('"' + '","'.join(row) + '"')
        else:
            print("ignoring table group: " + tbl_grp_name)

    csv_file.write("\n".join(CSV_DATA))
    csv_file.close()
Example #3
    "siteName": "Default Site",
    "serverGroupName": "Server us-east-1 Group ",
    "webServiceName": "HTTP Service",
    "webApplicationName": "Default Web Application"
}]

# Note: dict.copy() is shallow, so curPol["matchCriteria"] is the same list
# object as TEMP_POLICY["matchCriteria"].
curPol = TEMP_POLICY.copy()
print("===================  START REQUEST  ====================\r\n")
for policy_name in sourcePolicies:
    matchCriteria = sourcePolicies[policy_name]
    #curPol = json.loads(json.dumps(TEMP_POLICY.copy()))
    curPol["matchCriteria"].append(json.loads(matchCriteria))
print(curPol)
curPol["applyTo"] = applyToService
response = ss.makeCall(
    PRIMARY_MX_HOST, session_id,
    "/conf/policies/security/webServiceCustomPolicies/" + policyPrefix +
    "_service", method, json.dumps(curPol))
curPol["applyTo"] = applyToApp
response = ss.makeCall(
    PRIMARY_MX_HOST, session_id,
    "/conf/policies/security/webApplicationCustomPolicies/" + policyPrefix +
    "_app", method, json.dumps(curPol))
if response.status_code == 200:
    print(response.status_code)
else:
    print(response.json())
print("===================  END RESPONSE  ====================\r\n")

for policy_name in sourcePolicies:
    curPol2 = {
Example #4
def run():
    mx_host = CONFIG["mx_auth"]["endpoint"]
    session_id = ss.login(mx_host, CONFIG["mx_auth"]["username"],
                          CONFIG["mx_auth"]["password"])
    sites_response = ss.makeCall(mx_host, session_id, "/v1/conf/sites/")
    sites = sites_response.json()
    # Load all sites from site tree
    for site in sites["sites"]:
        # Load all server groups from each site
        server_groups_response = ss.makeCall(mx_host, session_id,
                                             '/v1/conf/serverGroups/' + site)
        server_groups = server_groups_response.json()
        for server_group in server_groups["server-groups"]:
            # Load all web services from each server group
            web_services_response = ss.makeCall(
                mx_host, session_id,
                '/v1/conf/webServices/' + site + "/" + server_group)
            web_services = web_services_response.json()
            for web_service in web_services["web-services"]:
                # Load all inbound krp rules from web service
                krp_inbound_rules_response = ss.makeCall(
                    mx_host, session_id, '/v1/conf/webServices/' + site + "/" +
                    server_group + "/" + web_service + "/krpInboundRules")
                krp_inbound_rules = krp_inbound_rules_response.json()
                for krp_inbound_rule in krp_inbound_rules["inboundKrpRules"]:
                    for inbound_port in krp_inbound_rule["gatewayPorts"]:
                        url = ('/v1/conf/webServices/' + site + "/" +
                               server_group + "/" + web_service +
                               "/krpInboundRules/" +
                               krp_inbound_rule["gatewayGroupName"] + "/" +
                               krp_inbound_rule["aliasName"] + "/" +
                               str(inbound_port) + "/krpOutboundRules")
                        krp_outbound_rules_response = ss.makeCall(
                            mx_host, session_id, url)
                        krp_outbound_rules = krp_outbound_rules_response.json()
                        for krp_outbound_rule in krp_outbound_rules[
                                "outboundKrpRules"]:
                            krp_rule = {
                                "eventType": CONFIG["newrelic_auth"]["event_type"],
                                "environment": CONFIG["environment"],
                                "site": site,
                                "server_group": server_group,
                                "service": web_service,
                                "gateway_group_name": krp_inbound_rule["gatewayGroupName"],
                                "krp_alias_name": krp_inbound_rule["aliasName"],
                                "inbound_port": inbound_port,
                                "priority": krp_outbound_rule["priority"],
                                "internal_host": krp_outbound_rule["internalIpHost"],
                                "outbound_port": krp_outbound_rule["serverPort"]
                            }
                            new_relic_url = (
                                "https://insights-collector.newrelic.com/v1/accounts/"
                                + CONFIG["newrelic_auth"]["account_id"] +
                                "/events")
                            headers = {
                                "Content-Type": "application/json",
                                "X-Insert-Key": CONFIG["newrelic_auth"]["api_key"]
                            }
                            logging.warning("NEW RELIC REQUEST (" +
                                            new_relic_url + ")" +
                                            json.dumps(krp_rule))
                            #if "proxy_host" in CONFIG["proxies"] and "proxy_port" in CONFIG["proxies"] and "proxy_username" in CONFIG["proxies"] and "proxy_password" in CONFIG["proxies"]:
                            if "proxies" in CONFIG:
                                proxies = {
                                    "https": ("https://" +
                                              CONFIG["proxies"]["proxy_username"] + ":" +
                                              CONFIG["proxies"]["proxy_password"] + "@" +
                                              CONFIG["proxies"]["proxy_host"] + ":" +
                                              CONFIG["proxies"]["proxy_port"])
                                }
                                response = requests.post(new_relic_url,
                                                         json.dumps(krp_rule),
                                                         proxies=proxies,
                                                         headers=headers,
                                                         verify=False)
                            else:
                                response = requests.post(new_relic_url,
                                                         json.dumps(krp_rule),
                                                         headers=headers,
                                                         verify=False)
                            logging.warning("NEW RELIC RESPONSE" +
                                            json.dumps(response.json()))
Example #5
def run():
    with open(PATH2REPORT, 'r') as f:
        i = 0
        reader = csv.reader(f)
        # Parse csv
        for row in reader:
            if i == 0:
                recordsCsv["headers"] = row
            else:
                curRawRowWithIndexes = {}
                curRowWithIndexes = {}
                for j in range(len(row)):
                    val = row[j]
                    if isint(val):
                        val = int(val)
                    elif isfloat(val):
                        val = float(val)
                    header = re.findall(
                        r"([A-Za-z0-9].+?[A-Za-z0-9].+)",
                        recordsCsv["headers"][j].replace(" ", "_").strip()).pop()
                    curRawRowWithIndexes[header] = val
                    # Parse only classification columns to send to ELK from config
                    if header in cxClassificationColumnMapping:
                        if header == "Table_Type":
                            curRowWithIndexes[cxClassificationColumnMapping[
                                header]] = objectMapping[val]
                        elif header == "Datasource_Type":
                            curRowWithIndexes[cxClassificationColumnMapping[
                                header]] = cxTableGroupDBServiceMapping[val]
                        else:
                            curRowWithIndexes[
                                cxClassificationColumnMapping[header]] = val
                    for col in cxClassificationAppendedValues:
                        curRowWithIndexes[
                            col] = cxClassificationAppendedValues[col]
                    curRowWithIndexes["Decision_Changed_Date"] = TIMESTAMP
                    curRowWithIndexes["Execution_Date"] = TIMESTAMP
                curRowWithIndexes["Table_Group"] = (
                    tableGroupPrefix + curRawRowWithIndexes["Datasource_Type"] +
                    " - " + curRawRowWithIndexes["Datasource_Name"] + " - " +
                    curRawRowWithIndexes["Schema"] + " - " +
                    curRawRowWithIndexes["Category"])
                recordsIndex["records"].append(curRowWithIndexes)
                recordsCsv["records"].append(curRawRowWithIndexes)
            i += 1
    # Write file formatted with string indexes per row/column
    f_index.write(json.dumps(recordsIndex))
    f_index.close()
    logging.warning('uploading file to s3: aws s3 cp ' + S3_REPORT_NAME +
                    ' s3://' + S3_BUCKET + "/" + S3_PREFIX + "/" +
                    S3_REPORT_NAME)
    logging.warning("uploading file (" + S3_REPORT_NAME +
                    ") to S3 with the following records: " +
                    json.dumps(recordsIndex))
    pipe = Popen([
        'aws', 's3', 'cp', S3_REPORT_NAME,
        's3://' + S3_BUCKET + "/" + S3_PREFIX + "/" + S3_REPORT_NAME
    ],
                 stdout=PIPE)
    pipe.communicate()
    os.remove(S3_REPORT_NAME)

    for record in recordsCsv["records"]:
        tableGroupName = (tableGroupPrefix + record["Datasource_Type"] +
                          " - " + record["Datasource_Name"] + " - " +
                          record["Schema"] + " - " + record["Category"])
        tableName = record["Table"]
        columnName = record["Column"]
        if tableGroupName not in tableGroups:
            tableGroups[tableGroupName] = {
                "dataType": record["Category"],
                "serviceType": cxTableGroupDBServiceMapping[record["Datasource_Type"]],
                "records": {}
            }
        if tableName not in tableGroups[tableGroupName]["records"]:
            tableGroups[tableGroupName]["records"][tableName] = {
                "Type": objectMapping[record["Table_Type"]],
                "Name": tableName,
                "col_map": {},
                "Columns": []
            }
        if columnName not in tableGroups[tableGroupName]["records"][tableName][
                "col_map"]:
            tableGroups[tableGroupName]["records"][tableName]["col_map"][
                columnName] = True
            tableGroups[tableGroupName]["records"][tableName][
                "Columns"].append(columnName)

    curTableGroupsInMx = {}
    mx_host = CONFIG["mx"]["endpoint"]
    session_id = ss.login(mx_host, CONFIG["mx"]["username"],
                          CONFIG["mx"]["password"])
    tbl_grps_response = ss.makeCall(mx_host, session_id, "/conf/tableGroups/")
    tbl_grps = tbl_grps_response.json()

    for tbl_grp in tbl_grps:
        curTableGroupsInMx[tbl_grp["displayName"]] = True

    for tableGroupName in tableGroups:
        tableGroup = tableGroups[tableGroupName]
        if tableGroupName not in curTableGroupsInMx:
            logging.warning("Table group now found, adding table group: " +
                            str(tableGroupName))
            newTableGroupObj = {
                "isSensitive": True,
                "serviceTypes": [tableGroup["serviceType"]],
                "dataType": tableGroup["dataType"],
                "displayName": tableGroupName
            }
            tbl_grps_response = ss.makeCall(mx_host, session_id,
                                            "/conf/tableGroups/", "POST",
                                            json.dumps(newTableGroupObj))
        newTableGroupRecordsObj = {"records": []}
        for tableName in tableGroup["records"]:
            record = tableGroup["records"][tableName]
            newTableGroupRecordsObj["records"].append({
                "Name":
                tableName,
                "Type":
                record["Type"],
                "Columns":
                record["Columns"]
            })
        logging.warning("Populating table group (" + str(tableGroupName) +
                        ") with the following records: " +
                        json.dumps(newTableGroupRecordsObj))
        tbl_grps_response = ss.makeCall(
            mx_host, session_id,
            "/conf/tableGroups/" + urllib.parse.quote(tableGroupName) + "/data",
            "POST", json.dumps(newTableGroupRecordsObj))
Example #6
def run():
    primary_session_id = ss.login(AUTH["ENDPOINT"], AUTH["USERNAME"],
                                  AUTH["PASSWORD"])
    ss.initMxSyncLog(AUTH["ENDPOINT"], primary_session_id, MX_SYNC_DATASET)
    # Iterate through each policy and pull out normalized list of datasets, ipGroups, and signatures
    for policy_name in sourcePolicies:
        policyAttr = sourcePolicies[policy_name]
        if policyAttr["policyType"] in ss.policyMapping:
            logging.warning(
                "Retrieving policyType \"" + policyAttr["policyType"] +
                "\" policyName \"" + policy_name +
                "\" from primary MX - REQUEST: \nGET /conf/policies/security/"
                + ss.policyMapping[policyAttr["policyType"]] + "/" +
                policy_name)
            response = ss.makeCall(
                AUTH["ENDPOINT"], primary_session_id,
                "/conf/policies/security/" +
                ss.policyMapping[policyAttr["policyType"]] + "/" +
                urllib.parse.quote(policy_name))
            if response.status_code == 404:
                policyAttr["isok"] = False
            else:
                policyObj = response.json()
                ALLPOLICIES[policy_name] = policyObj
                sourcePolicies[policy_name]["config.json"] = policyObj
                sourcePolicies[policy_name]["isok"] = True
                logging.warning("RESPONSE: \n" + str(policyObj))
                # No API call for anti-scraping
                # firewallPolicies
                # httpProtocolPolicies
                # http2ProtocolPolicies
                # webCorrelationPolicies
                # snippetInjectionPolicies
                # webApplicationSignaturesPolicies - signatures in predicates and exceptions
                # httpProtocolSignaturesPolicies
                # streamSignaturesPolicies
                # webProfilePolicies
                curPolicyType = ss.policyMapping[policyAttr["policyType"]]
                # check for rules->ipGroup in firewallPolicies
                if "rules" in policyObj:
                    for rule in policyObj["rules"]:
                        if "ipGroup" in rule:
                            if rule["ipGroup"] not in ss.ignoreADCIpGroups:
                                # print("Capturing IPGroup \"" + rule["ipGroup"] + "\" for policy " + policy_name)
                                logging.warning("Capturing IPGroup \"" +
                                                rule["ipGroup"] +
                                                "\" for policy " + policy_name)
                                IPGROUPS[rule["ipGroup"]] = False
                            else:
                                # print("Ignoring IPGroup \"" + rule["ipGroup"] + "\" for policy " + policy_name)
                                logging.warning("Ignoring IPGroup \"" +
                                                rule["ipGroup"] +
                                                "\" for policy " + policy_name)

                # check for exceptions->predicates->ipGroups in httpProtocolPolicies, http2ProtocolPolicies, webCorrelationPolicies, snippetInjectionPolicies
                if "exceptions" in policyObj:
                    for exception in policyObj["exceptions"]:
                        if "predicates" in exception:
                            for predicate in exception["predicates"]:
                                if "ipGroups" in predicate:
                                    for ipGroup in predicate["ipGroups"]:
                                        if ipGroup not in ss.ignoreADCIpGroups:
                                            # print("Capturing IPGroup \"" + ipGroup + "\" for policy " + policy_name)
                                            logging.warning(
                                                "Capturing IPGroup \"" +
                                                ipGroup + "\" for policy " +
                                                policy_name)
                                            IPGROUPS[ipGroup] = False
                                        else:
                                            # print("Ignoring IPGroup \"" + ipGroup + "\" for policy " + policy_name)
                                            logging.warning(
                                                "Ignoring IPGroup \"" +
                                                ipGroup + "\" for policy " +
                                                policy_name)
                # check matchCriteria - webApplicationCustomPolicies, webServiceCustomPolicies
                if "matchCriteria" in policyObj:
                    for mc in policyObj["matchCriteria"]:
                        # matchCriteria->lookupDatasetSearch->searchInLookupDataset
                        # matchCriteria->enrichmentData->searchInLookupDataset
                        if mc["type"] == "lookupDatasetSearch" or mc[
                                "type"] == "enrichmentData":
                            for dataset in mc["searchInLookupDataset"]:
                                # print("Capturing lookupDatasetSearch dataset \"" + dataset + "\" for policy " + policy_name)
                                logging.warning(
                                    "Capturing lookupDatasetSearch dataset \""
                                    + dataset + "\" for policy " + policy_name)
                                DATASETS[dataset] = False
                        # matchCriteria->datasetAttributeLookup[]->searchInLookupDataset
                        elif mc["type"] == "datasetAttributeLookup":
                            for dataset in mc["searchInLookupDataset"]:
                                if dataset not in ss.ignoreADCDatasets:
                                    # print("Capturing searchInLookupDataset dataset \"" + dataset + "\" for policy " + policy_name)
                                    logging.warning(
                                        "Capturing searchInLookupDataset dataset \""
                                        + dataset + "\" for policy " +
                                        policy_name)
                                    DATASETS[dataset] = False
                                else:
                                    # print("Ignoring dataset \"" + dataset + "\" for policy " + policy_name)
                                    logging.warning("Capturing dataset \"" +
                                                    dataset +
                                                    "\" for policy " +
                                                    policy_name)
                                    # DATASETS[dataset] = ss.getDataset(AUTH["ENDPOINT"], primary_session_id, dataset)
                                    # logging.warning("Retrieving \""+dataset+"\" dataset for policy "+policy_name)
                            # matchCriteria->datasetAttributeLookup->lookupDataset
                            if dataset not in ss.ignoreADCDatasets:
                                # print("Capturing lookupDataset dataset \"" + mc["lookupDataset"] + "\" for policy " + policy_name)
                                logging.warning(
                                    "Capturing lookupDataset dataset \"" +
                                    mc["lookupDataset"] + "\" for policy " +
                                    policy_name)
                                DATASETS[mc["lookupDataset"]] = False
                            else:
                                # print("Ignoring lookupDataset dataset \"" + mc["lookupDataset"] + "\" for policy " + policy_name)
                                logging.warning(
                                    "Ignoring lookupDataset dataset \"" +
                                    mc["lookupDataset"] + "\" for policy " +
                                    policy_name)
                            # DATASETS[mc["lookupDataset"]] = ss.getDataset(AUTH["ENDPOINT"], primary_session_id, mc["lookupDataset"])
                            # logging.warning("Retrieving \"" + mc["lookupDataset"] + "\" dataset for policy " + policy_name)
                        elif mc["type"] == "signatures":
                            sourcePolicies[policy_name]["isok"] = False
                            # 	for signature in mc["signatures"]:
                            # 		sourcePolicies[policy_name]["isok"] = False
                            # 		SIGNATURES[signature["name"]] = False
                            # 		logging.warning("Retrieving \""+signature["name"]+"\" signature for policy "+policy_name)
                            # 	# print(mc["type"])
                        # matchCriteria->sourceIpAddresses[]
                        # matchCriteria->proxyIpAddresses[]
                        elif mc["type"] == "sourceIpAddresses" or mc[
                                "type"] == "proxyIpAddresses":
                            for ipGroup in mc["ipGroups"]:
                                if ipGroup not in ss.ignoreADCIpGroups:
                                    # print("Capturing sourceIpAddresses ipGroup \"" + ipGroup + "\" for policy " + policy_name)
                                    logging.warning(
                                        "Capturing sourceIpAddresses ipGroup \""
                                        + ipGroup + "\" for policy " +
                                        policy_name)
                                    IPGROUPS[ipGroup] = False
                                else:
                                    # print("Ignoring sourceIpAddresses ipGroup \"" + ipGroup + "\" for policy " + policy_name)
                                    logging.warning(
                                        "Ignoring sourceIpAddresses ipGroup \""
                                        + ipGroup + "\" for policy " +
                                        policy_name)
                                # logging.warning("Retrieving IPGroup ("+ipGroup+") for policy " + policy_name)
                                # IPGROUPS[ipGroup] = ss.getIPGroup(AUTH["ENDPOINT"], primary_session_id, ipGroup)
        else:
            policyAttr["isok"] = False
            logging.warning("Unsupported policy type \"" +
                            policyAttr["policyType"] +
                            "\", skipping policy policy \"" + policy_name +
                            "\"")

    # load normalized list of datasets
    for dataset in DATASETS:
        logging.warning("Retrieving \"" + dataset + "\" dataset")
        DATASETS[dataset] = ss.getDataset(AUTH["ENDPOINT"], primary_session_id,
                                          dataset)
    # load normalized list of ipGroups
    for ipGroup in IPGROUPS:
        IPGROUPS[ipGroup] = ss.getIPGroup(AUTH["ENDPOINT"], primary_session_id,
                                          ipGroup)

    for MX in MXs:
        cur_session_id = ss.login(MX["ENDPOINT"], AUTH["USERNAME"],
                                  AUTH["PASSWORD"])
        # Migrate datasets
        for dataset in DATASETS:
            MX_SYNC_LOG_RECORDS["records"].append(
                ss.upsertDataset(MX["ENDPOINT"], cur_session_id,
                                 DATASETS[dataset]))
        for ipGroup in IPGROUPS:
            MX_SYNC_LOG_RECORDS["records"].append(
                ss.upsertIPGroup(MX["ENDPOINT"], cur_session_id,
                                 IPGROUPS[ipGroup]))

        for policy_name in sourcePolicies:
            policyAttr = sourcePolicies[policy_name]
            try:
                if policyAttr["policyType"] in ss.policyMapping:
                    for asset in policyAttr["config.json"]["applyTo"]:
                        asset["serverGroupName"] = asset[
                            "serverGroupName"].replace(AUTH["REGION"],
                                                       MX["REGION"])
                    MX_SYNC_LOG_RECORDS["records"].append(
                        ss.upsertWebPolicy(MX["ENDPOINT"], cur_session_id,
                                           policy_name, policyAttr))
            except KeyError as e:
                logging.warning("KeyError:" + str(e))

        ss.logout(MX["ENDPOINT"], cur_session_id)
    datasetObj = json.loads(MX_SYNC_DATASET)
    ss.makeCall(AUTH["ENDPOINT"], primary_session_id,
                "/conf/dataSets/" + datasetObj["dataset-name"] + "/data",
                "POST", json.dumps(MX_SYNC_LOG_RECORDS))
    ss.logout(AUTH["ENDPOINT"], primary_session_id)