def test_sanity_ingest(self, mock_post, mock_aad, mock_block_blob,
                       mock_queue):
    """Test simple ingest"""

    ingest_client = KustoIngestClient(
        "https://ingest-somecluster.kusto.windows.net")

    ingestion_properties = IngestionProperties(database="database",
                                               table="table",
                                               dataFormat=DataFormat.csv)

    file_path = os.path.join(os.getcwd(), "azure-kusto-ingest", "tests",
                             "input", "dataset.csv")

    ingest_client.ingest_from_multiple_files(
        [file_path],
        delete_sources_on_success=False,
        ingestion_properties=ingestion_properties)
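
# Sketch (assumption): the four mock_* parameters above are presumably injected
# by stacked unittest.mock.patch decorators that were omitted from this excerpt,
# roughly along these lines (the patch targets are elided):
#
#     @mock.patch(<queue service target>)       # -> mock_queue
#     @mock.patch(<block blob service target>)  # -> mock_block_blob
#     @mock.patch(<AAD token helper target>)    # -> mock_aad
#     @mock.patch(<HTTP post target>)           # -> mock_post
#     def test_sanity_ingest(self, mock_post, mock_aad, mock_block_blob,
#                            mock_queue):
#         ...
#
# Stacked patches are applied bottom-up, so the last decorator supplies the
# first mock parameter.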
Example #2
import os
import time

# Imports as exposed by the azure-kusto-ingest version this sample targets.
from azure.kusto.ingest import (KustoIngestClient, IngestionProperties,
                                DataFormat)

# KUSTO_CLIENT (a query client for the engine endpoint) and Helpers (the
# sample's table-mapping helper) are assumed to be defined elsewhere in the
# original script.
KUSTO_INGEST_CLIENT = KustoIngestClient(
    "https://ingest-toshetah.kusto.windows.net")

KUSTO_CLIENT.execute("PythonTest", ".drop table Deft ifexists")

# Sanity test - ingest from csv to a non-existing table
CSV_INGESTION_PROPERTIES = IngestionProperties(
    "PythonTest",
    "Deft",
    dataFormat=DataFormat.csv,
    mapping=Helpers.create_deft_table_csv_mappings())
CSV_FILE_PATH = os.path.join(os.getcwd(), "azure-kusto-ingest", "tests",
                             "input", "dataset.csv")
ZIPPED_CSV_FILE_PATH = os.path.join(os.getcwd(), "azure-kusto-ingest", "tests",
                                    "input", "dataset.csv.gz")
KUSTO_INGEST_CLIENT.ingest_from_multiple_files(
    [CSV_FILE_PATH, ZIPPED_CSV_FILE_PATH],
    False,  # delete_sources_on_success
    CSV_INGESTION_PROPERTIES)

# Queued ingestion is asynchronous, so wait before validating the row count.
time.sleep(60)
RESPONSE = KUSTO_CLIENT.execute("PythonTest", "Deft | count")
for row in RESPONSE.iter_all():
    if int(row['Count']) == 20:
        print("Completed ingest from CSV mapping successfully.")
    else:
        print("Deft | count = " + row['Count'])

# Sanity test - ingest from json to an existing table
JSON_INGESTION_PROPERTIES = IngestionProperties(
    "PythonTest",
    "Deft",
    dataFormat=DataFormat.json,
    mapping=Helpers.create_deft_table_json_mappings())
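
# The original example is truncated here. A minimal sketch (assumption) of how
# the JSON ingestion presumably continues, mirroring the CSV flow above; the
# file names dataset.json / dataset.json.gz are hypothetical stand-ins.
JSON_FILE_PATH = os.path.join(os.getcwd(), "azure-kusto-ingest", "tests",
                              "input", "dataset.json")
ZIPPED_JSON_FILE_PATH = os.path.join(os.getcwd(), "azure-kusto-ingest", "tests",
                                     "input", "dataset.json.gz")
KUSTO_INGEST_CLIENT.ingest_from_multiple_files(
    [JSON_FILE_PATH, ZIPPED_JSON_FILE_PATH],
    False,  # delete_sources_on_success
    JSON_INGESTION_PROPERTIES)
# Validation would mirror the "Deft | count" check above.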
Example #3
INGESTION_PROPERTIES = IngestionProperties(database="database name",
                                           table="table name",
                                           dataFormat=DataFormat.csv)

# Construct the client with the ingestion endpoint URL only...
INGEST_CLIENT = KustoIngestClient(
    "https://ingest-<clustername>.kusto.windows.net")

# ...or authenticate with an AAD application via a connection string builder.
KCSB = KustoConnectionStringBuilder.with_aad_application_key_authentication(
    "https://ingest-<clustername>.kusto.windows.net", "aad app id", "secret")
INGEST_CLIENT = KustoIngestClient(KCSB)

FILE_DESCRIPTOR = FileDescriptor(
    "E:\\filePath.csv", 3333)  # 3333 is the raw size of the data in bytes.
INGEST_CLIENT.ingest_from_multiple_files(
    [FILE_DESCRIPTOR],
    delete_sources_on_success=True,
    ingestion_properties=INGESTION_PROPERTIES)

INGEST_CLIENT.ingest_from_multiple_files(
    ["E:\\filePath.csv"],
    delete_sources_on_success=True,
    ingestion_properties=INGESTION_PROPERTIES)

BLOB_DESCRIPTOR = BlobDescriptor(
    "https://path-to-blob.csv.gz?sas",
    10)  # 10 is the raw size of the data in bytes.
INGEST_CLIENT.ingest_from_multiple_blobs(
    [BLOB_DESCRIPTOR],
    delete_sources_on_success=True,
    ingestion_properties=INGESTION_PROPERTIES)
Example #4
ingestion_properties = IngestionProperties(database="database name",
                                           table="table name",
                                           dataFormat=DataFormat.csv)

# Construct the client with the ingestion endpoint URL only...
ingest_client = KustoIngestClient(
    "https://ingest-<clustername>.kusto.windows.net")
# ...or pass AAD application credentials explicitly.
ingest_client = KustoIngestClient(
    "https://ingest-<clustername>.kusto.windows.net",
    client_id="aad app id",
    client_secret="secret")

file_descriptor = FileDescriptor(
    "E:\\filePath.csv", 3333)  # 3333 is the raw size of the data in bytes.
ingest_client.ingest_from_multiple_files(
    [file_descriptor],
    delete_sources_on_success=True,
    ingestion_properties=ingestion_properties)

ingest_client.ingest_from_multiple_files(
    ["E:\\filePath.csv"],
    delete_sources_on_success=True,
    ingestion_properties=ingestion_properties)

blob_descriptor = BlobDescriptor(
    "https://path-to-blob.csv.gz?sas",
    10)  # 10 is the raw size of the data in bytes.
ingest_client.ingest_from_multiple_blobs(
    [blob_descriptor],
    delete_sources_on_success=True,
    ingestion_properties=ingestion_properties)
Example #5
import csv
from datetime import datetime

from azure.kusto.ingest import (KustoIngestClient, IngestionProperties,
                                DataFormat)

# `credentials` and `KustoLogType` are assumed to be project-local modules
# providing the connection settings and log-type constants used below.


class KustoLogger(object):
    # Column order of the target Kusto table; rows are written headerless.
    _log_keys = [
        "TIMESTAMP", "Role", "RoleInstance", "Level", "ProviderName",
        "ExternalServiceName", "IncidentId", "Status", "LogType", "Context"
    ]

    def __init__(self):
        self.ingest_client = KustoIngestClient(
            credentials.kusto_ppe_ingest_connection,
            client_id=credentials.kusto_application_id,
            client_secret=credentials.kusto_application_key)
        self.properties = IngestionProperties(database="BingAdsUCM",
                                              table="PerfIcMAlertEvent",
                                              dataFormat=DataFormat.csv)
        self.log_buffer_file = "kusto_log_buffer.csv"

    def PerfNormal(self, externalServiceName, requestUrl, detectedDate):
        context = {
            "ExternalServiceName": externalServiceName,
            "RequestUrl": requestUrl,
            "DetectedDate": detectedDate
        }
        self._write_log(log_type=KustoLogType.perf_normal,
                        is_succeed=True,
                        incident_id='',
                        context=context,
                        externalServiceName=externalServiceName,
                        push_remote=True)

    def PerfAnomaly(self, externalServiceName, requestUrl, detectedDate,
                    incident_id, log):
        context = {
            "ExternalServiceName": externalServiceName,
            "RequestUrl": requestUrl,
            "DetectedDate": detectedDate,
            "IncidentId": incident_id,
            "Log": log
        }
        self._write_log(log_type=KustoLogType.perf_anomaly,
                        is_succeed=True,
                        incident_id=incident_id,
                        context=context,
                        externalServiceName=externalServiceName,
                        push_remote=True)

    def ExecuteError(self, logType, externalServiceName, requestUrl,
                     detectedDate, log):
        context = {
            "ExternalServiceName": externalServiceName,
            "RequestUrl": requestUrl,
            "DetectedDate": detectedDate,
            "Log": log
        }
        self._write_log(log_type=logType,
                        is_succeed=False,
                        incident_id='',
                        context=context,
                        externalServiceName=externalServiceName,
                        push_remote=True)

    def _write_log(self,
                   log_type: object,
                   is_succeed: object,
                   incident_id: object = None,
                   context: object = None,
                   externalServiceName: object = None,
                   push_remote: object = False):
        now = str(datetime.utcnow())
        log = {
            "TIMESTAMP": now,
            "Role": "Microsoft.UCM.PerfIcMAlert",
            "RoleInstance": "Microsoft.UCM.PerfIcMAlert_IN_1",
            "Level": 4,
            "ProviderName": "PyEventSource",
            "ExternalServiceName": externalServiceName,
            "IncidentId": incident_id,
            "Status": "Success" if is_succeed else "Failure",
            "LogType": log_type,
            "Context": str(context)
        }

        with open(self.log_buffer_file, "a+") as file:
            f_csv = csv.DictWriter(file, self._log_keys)
            f_csv.writerow(log)
        if push_remote is True:
            self._push_log()

    def _push_log(self):
        self.ingest_client.ingest_from_multiple_files(
            [self.log_buffer_file],
            delete_sources_on_success=True,
            ingestion_properties=self.properties)
        # clean buffer
        open(self.log_buffer_file, "w").close()
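
# A minimal usage sketch (assumption): it requires the project-local
# `credentials` module and `KustoLogType` enum to be importable; the service
# name, URL, and timestamp below are hypothetical values.
if __name__ == "__main__":
    logger = KustoLogger()
    logger.PerfNormal(externalServiceName="ExampleService",
                      requestUrl="https://example.invalid/api/ping",
                      detectedDate=str(datetime.utcnow()))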