import datetime
import json
import re
import sys

# The auth/utility helpers ship with the add-on; these import paths are an
# assumption and may need adjusting to match the add-on's actual package layout.
import ta_azure_utils.auth as azauth
import ta_azure_utils.utils as azutil


def collect_events(helper, ew):
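    """Collect the Teams device usage report from the Microsoft Graph beta
    endpoint and write each returned record to Splunk as an event."""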
    global_account = helper.get_arg("azure_app_account")
    client_id = helper.get_global_setting("client_id")
    client_secret = helper.get_global_setting("client_secret")
    tenant_id = helper.get_global_setting("tenant_id")
    collection_period = helper.get_arg("collection_period")
    source_type = helper.get_arg("data_sourcetype")

    access_token = azauth.get_graph_access_token(client_id, client_secret, tenant_id, helper)

    if access_token:
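        # The Graph usage reports accept a trailing period of D7, D30, D90, or D180 days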
        url = "https://graph.microsoft.com/beta/reports/getTeamsDeviceUsageUserDetail(period='D%s')?$format=application/json" % collection_period

        try:
            records = azutil.get_items(helper, access_token, url)

            for record in records:
                event = helper.new_event(
                    data=json.dumps(record),
                    source=helper.get_input_type(),
                    index=helper.get_output_index(),
                    sourcetype=source_type)
                ew.write_event(event)
        except Exception as e:
            helper.log_error("_Splunk_ Error collecting Teams device usage data: %s" % str(e))
            raise e


def _index_metrics(helper, ew, access_token, resource_obj, metric_url,
                   requested_metric_statistics, metric_aggregations):
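    """Write one Splunk event per datapoint returned by the Azure Monitor
    metrics API for the given resource, keeping only requested statistics."""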

    # Extract the subscription ID to include in the event
    subscription_id = ''
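    # Azure resource IDs look like /subscriptions/<guid>/resourceGroups/...,
    # so capture the GUID that follows "subscriptions/"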
    re_sub = re.compile(r"subscriptions\/(.*?)\/")
    try:
        subscription_id = re_sub.search(
            resource_obj["resource_id"].lower()).group(1)
    except Exception:
        helper.log_error(
            '_Splunk_ Regex parsing subscription_id failed with error: {0}'.
            format(sys.exc_info()[0]))

    try:
        resource_metrics = azutil.get_items(helper,
                                            access_token,
                                            metric_url,
                                            items=[])
    except Exception as e:
        helper.log_error(
            "_Splunk_ Error getting metrics for resource '%s'. Detail: %s" %
            (resource_obj["resource_id"], str(e)))
        # Without metrics there is nothing to index for this resource
        return

    for metric in resource_metrics:
        metric_name = metric["name"]["value"]
        metric_unit = metric["unit"]

        for time_series in metric["timeseries"]:
            for data in time_series["data"]:
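                # One event per datapoint so each value keeps its own timestamp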
                metric_obj = {}
                metric_obj["host"] = resource_obj["resource_id"]
                metric_obj["metric_name"] = metric_name
                metric_obj["_time"] = data["timeStamp"]
                metric_obj["subscription_id"] = subscription_id
                metric_obj["unit"] = metric_unit
                metric_obj["namespace"] = resource_obj["resource_type"]

                for metric_stat in requested_metric_statistics:
                    # Only collect the requested statistics
                    if metric_stat in metric_aggregations[metric_name]:
                        if metric_stat in data:
                            metric_obj[metric_stat] = data[metric_stat]

                event = helper.new_event(
                    data=json.dumps(metric_obj),
                    index=helper.get_output_index(),
                    sourcetype=helper.get_arg("source_type"))
                ew.write_event(event)


def _cache_metric_definitions(helper, access_token, resource_id,
                              resource_type):
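    """Fetch the metric definitions for a resource from the Azure Monitor REST
    API and cache them in a checkpoint keyed by the resource type."""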
    helper.log_debug(
        "_Splunk_ Getting metric definitions from Azure REST for type '%s'" %
        resource_type)
    metric_definition_url = "https://management.azure.com%s/providers/microsoft.insights/metricDefinitions?api-version=2018-01-01" % resource_id
    metric_definitions = azutil.get_items(helper,
                                          access_token,
                                          metric_definition_url,
                                          items=[])

    metrics = []
    for metric in metric_definitions:
        metric_obj = {}
        metric_obj["name"] = metric['name']['value']

        time_grains = []
        for time_grain in metric["metricAvailabilities"]:
            time_grains.append(time_grain["timeGrain"])
        metric_obj["time_grains"] = time_grains

        aggregation_types = []
        for aggregation_type in metric["supportedAggregationTypes"]:
            aggregation_types.append(aggregation_type.lower())
        metric_obj["aggregation_types"] = aggregation_types

        metrics.append(metric_obj)

    if metrics:
        # Cache the metric definition list as a check point
        checkpoint_data = {}
        checkpoint_data["last_updated_date"] = str(datetime.datetime.now())
        checkpoint_data["resource_type"] = resource_type
        checkpoint_data["metrics"] = metrics
        helper.log_debug(
            "_Splunk_ Saving metric definitions from Azure REST for type '%s'.  Definitions: %s"
            % (resource_type, str(metrics)))
        helper.save_check_point(resource_type, checkpoint_data)
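

# Illustrative sketch (an assumption, not part of the original code): one way
# the cached definitions could be read back and shaped into the
# metric_aggregations mapping that _index_metrics expects.
# _load_metric_aggregations is a hypothetical helper name;
# helper.get_check_point() is the Add-on Builder counterpart to
# helper.save_check_point().
def _load_metric_aggregations(helper, access_token, resource_obj):
    resource_type = resource_obj["resource_type"]
    checkpoint_data = helper.get_check_point(resource_type)
    if not checkpoint_data:
        # Populate the cache on first use, then re-read it
        _cache_metric_definitions(helper, access_token,
                                  resource_obj["resource_id"], resource_type)
        checkpoint_data = helper.get_check_point(resource_type) or {"metrics": []}
    # Map each metric name to the aggregation types Azure supports for it
    return {metric["name"]: metric["aggregation_types"]
            for metric in checkpoint_data["metrics"]}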