Example #1
def returner(ret):
    '''
    Send osquery (nebula) query results to an HTTP endpoint (/hubble/nebula)
    using basic auth.
    '''
    opts_list = _get_options()

    clouds = get_cloud_details()

    for opts in opts_list:
        proxy = opts['proxy']
        timeout = opts['timeout']
        custom_fields = opts['custom_fields']

        indexer = opts['indexer']
        port = opts['port']
        password = opts['password']
        user = opts['user']

        # Assign all the fields to be sent from the return data and grains
        data = ret['return']
        minion_id = ret['id']
        jid = ret['jid']
        master = __grains__['master']
        fqdn = __grains__['fqdn']
        fqdn = fqdn if fqdn else minion_id
        try:
            fqdn_ip4 = __grains__['fqdn_ip4'][0]
        except IndexError:
            fqdn_ip4 = __grains__['ipv4'][0]
        if fqdn_ip4.startswith('127.'):
            for ip4_addr in __grains__['ipv4']:
                if ip4_addr and not ip4_addr.startswith('127.'):
                    fqdn_ip4 = ip4_addr
                    break

        if not data:
            return
        else:
            for query in data:
                for query_name, query_results in query.iteritems():
                    for query_result in query_results['data']:
                        event = {}
                        payload = {}
                        event.update(query_result)
                        event.update({'query': query_name})
                        event.update({'job_id': jid})
                        event.update({'master': master})
                        event.update({'minion_id': minion_id})
                        event.update({'dest_host': fqdn})
                        event.update({'dest_ip': fqdn_ip4})

                        for cloud in clouds:
                            event.update(cloud)

                        for custom_field in custom_fields:
                            custom_field_name = 'custom_' + custom_field
                            custom_field_value = __salt__['config.get'](
                                custom_field, '')
                            if isinstance(custom_field_value, str):
                                event.update(
                                    {custom_field_name: custom_field_value})
                            elif isinstance(custom_field_value, list):
                                custom_field_value = ','.join(
                                    custom_field_value)
                                event.update(
                                    {custom_field_name: custom_field_value})

                        payload.update({'host': fqdn})
                        payload.update({'index': opts['index']})
                        payload.update({'sourcetype': opts['sourcetype']})
                        payload.update({'event': event})

                        # If the osquery result includes a field called 'time', sanity-check it:
                        # anything older than a year is cleared. Note that this returner does not
                        # forward event_time; only the HEC-based returner in Example #5 uses it.
                        event_time = query_result.get('time', '')
                        try:
                            if (datetime.fromtimestamp(time.time()) -
                                    datetime.fromtimestamp(
                                        float(event_time))).days > 365:
                                event_time = ''
                        except:
                            event_time = ''
                        finally:
                            rdy = json.dumps(payload)
                            requests.post('{}:{}/hubble/nebula'.format(
                                indexer, port),
                                          rdy,
                                          auth=HTTPBasicAuth(user, password))
    return
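
Example #1 posts each osquery result to a Hubble-style indexer endpoint over HTTP basic auth. A minimal sketch of what one such request looks like, using hypothetical option values and field contents (none of these stand for real hosts or credentials):

import json

import requests
from requests.auth import HTTPBasicAuth

# Hypothetical stand-ins for opts['indexer'], opts['port'], opts['user'], opts['password']
indexer = 'https://indexer.example.com'
port = 9200
user = 'hubble'
password = 'secret'

payload = {
    'host': 'minion01.example.com',        # fqdn (falls back to minion_id when blank)
    'index': 'hubble_nebula',              # opts['index']
    'sourcetype': 'hubble_osquery',        # opts['sourcetype']
    'event': {
        'query': 'running_procs',          # query_name from the osquery return
        'job_id': '20240101000000000000',  # ret['jid']
        'minion_id': 'minion01',
        'dest_host': 'minion01.example.com',
        'dest_ip': '10.0.0.5',
    },
}

# Same call shape as the returner: JSON body, HTTP basic auth.
requests.post('{}:{}/hubble/nebula'.format(indexer, port),
              json.dumps(payload),
              auth=HTTPBasicAuth(user, password))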
Example #2
def returner(ret):
    opts_list = _get_options()

    # Get cloud details
    clouds = get_cloud_details()

    for opts in opts_list:
        log.info('Options: %s' % json.dumps(opts))
        http_event_collector_key = opts['token']
        http_event_collector_host = opts['indexer']
        http_event_collector_port = opts['port']
        hec_ssl = opts['http_event_server_ssl']
        proxy = opts['proxy']
        timeout = opts['timeout']
        custom_fields = opts['custom_fields']

        # Set up the fields to be extracted at index time. The field values must be strings.
        # Note that these fields will also still be available in the event data
        index_extracted_fields = [
            'aws_instance_id', 'aws_account_id', 'azure_vmId'
        ]
        try:
            index_extracted_fields.extend(opts['index_extracted_fields'])
        except TypeError:
            pass

        # Set up the collector
        hec = http_event_collector(http_event_collector_key,
                                   http_event_collector_host,
                                   http_event_port=http_event_collector_port,
                                   http_event_server_ssl=hec_ssl,
                                   proxy=proxy,
                                   timeout=timeout)
        # st = 'salt:hubble:nova'
        data = ret['return']
        minion_id = ret['id']
        jid = ret['jid']
        fqdn = __grains__['fqdn']
        # Sometimes fqdn is blank. If it is, replace it with minion_id
        fqdn = fqdn if fqdn else minion_id
        master = __grains__['master']
        try:
            fqdn_ip4 = __grains__['fqdn_ip4'][0]
        except IndexError:
            fqdn_ip4 = __grains__['ipv4'][0]
        if fqdn_ip4.startswith('127.'):
            for ip4_addr in __grains__['ipv4']:
                if ip4_addr and not ip4_addr.startswith('127.'):
                    fqdn_ip4 = ip4_addr
                    break

        if __grains__['master']:
            master = __grains__['master']
        else:
            # We *are* the master, so use our hostname
            master = socket.gethostname()

        if not isinstance(data, dict):
            log.error('Data sent to splunk_nova_return was not formed as a '
                      'dict:\n{0}'.format(data))
            return

        for fai in data.get('Failure', []):
            check_id = fai.keys()[0]
            payload = {}
            event = {}
            event.update({'check_result': 'Failure'})
            event.update({'check_id': check_id})
            event.update({'job_id': jid})
            if not isinstance(fai[check_id], dict):
                event.update({'description': fai[check_id]})
            elif 'description' in fai[check_id]:
                for key, value in fai[check_id].iteritems():
                    if key not in ['tag']:
                        event[key] = value
            event.update({'master': master})
            event.update({'minion_id': minion_id})
            event.update({'dest_host': fqdn})
            event.update({'dest_ip': fqdn_ip4})

            for cloud in clouds:
                event.update(cloud)

            for custom_field in custom_fields:
                custom_field_name = 'custom_' + custom_field
                custom_field_value = __salt__['config.get'](custom_field, '')
                if isinstance(custom_field_value, str):
                    event.update({custom_field_name: custom_field_value})
                elif isinstance(custom_field_value, list):
                    custom_field_value = ','.join(custom_field_value)
                    event.update({custom_field_name: custom_field_value})

            payload.update({'host': fqdn})
            payload.update({'index': opts['index']})
            payload.update({'sourcetype': opts['sourcetype']})
            payload.update({'event': event})

            # Potentially add metadata fields:
            fields = {}
            for item in index_extracted_fields:
                if item in payload['event'] and not isinstance(
                        payload['event'][item], (list, dict, tuple)):
                    fields[item] = str(payload['event'][item])
            if fields:
                payload.update({'fields': fields})

            hec.batchEvent(payload)

        for suc in data.get('Success', []):
            check_id = suc.keys()[0]
            payload = {}
            event = {}
            event.update({'check_result': 'Success'})
            event.update({'check_id': check_id})
            event.update({'job_id': jid})
            if not isinstance(suc[check_id], dict):
                event.update({'description': suc[check_id]})
            elif 'description' in suc[check_id]:
                for key, value in suc[check_id].iteritems():
                    if key not in ['tag']:
                        event[key] = value
            event.update({'master': master})
            event.update({'minion_id': minion_id})
            event.update({'dest_host': fqdn})
            event.update({'dest_ip': fqdn_ip4})

            for cloud in clouds:
                event.update(cloud)

            for custom_field in custom_fields:
                custom_field_name = 'custom_' + custom_field
                custom_field_value = __salt__['config.get'](custom_field, '')
                if isinstance(custom_field_value, str):
                    event.update({custom_field_name: custom_field_value})
                elif isinstance(custom_field_value, list):
                    custom_field_value = ','.join(custom_field_value)
                    event.update({custom_field_name: custom_field_value})

            payload.update({'host': fqdn})
            payload.update({'sourcetype': opts['sourcetype']})
            payload.update({'index': opts['index']})
            payload.update({'event': event})

            # Potentially add metadata fields:
            fields = {}
            for item in index_extracted_fields:
                if item in payload['event'] and not isinstance(
                        payload['event'][item], (list, dict, tuple)):
                    fields[item] = str(payload['event'][item])
            if fields:
                payload.update({'fields': fields})

            hec.batchEvent(payload)

        if data.get('Compliance', None):
            payload = {}
            event = {}
            event.update({'job_id': jid})
            event.update({'compliance_percentage': data['Compliance']})
            event.update({'master': master})
            event.update({'minion_id': minion_id})
            event.update({'dest_host': fqdn})
            event.update({'dest_ip': fqdn_ip4})

            for cloud in clouds:
                event.update(cloud)

            for custom_field in custom_fields:
                custom_field_name = 'custom_' + custom_field
                custom_field_value = __salt__['config.get'](custom_field, '')
                if isinstance(custom_field_value, str):
                    event.update({custom_field_name: custom_field_value})
                elif isinstance(custom_field_value, list):
                    custom_field_value = ','.join(custom_field_value)
                    event.update({custom_field_name: custom_field_value})

            payload.update({'host': fqdn})
            payload.update({'sourcetype': opts['sourcetype']})
            payload.update({'index': opts['index']})
            payload.update({'event': event})

            # Potentially add metadata fields:
            fields = {}
            for item in index_extracted_fields:
                if item in payload['event'] and not isinstance(
                        payload['event'][item], (list, dict, tuple)):
                    fields[item] = str(payload['event'][item])
            if fields:
                payload.update({'fields': fields})

            hec.batchEvent(payload)

        hec.flushBatch()
    return
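
Example #2 batches events through the hubblestack http_event_collector wrapper, which is not shown in this listing. As a rough sketch of what each batched payload amounts to on the wire, the same JSON document can be sent straight to Splunk's standard HTTP Event Collector endpoint; the host, port, and token below are placeholders, and the endpoint path and header follow stock Splunk HEC conventions rather than anything specific to this wrapper:

import json

import requests

hec_host = 'splunk.example.com'   # opts['indexer']
hec_port = 8088                   # opts['port'] (Splunk's default HEC port)
hec_token = '00000000-0000-0000-0000-000000000000'  # opts['token']

payload = {
    'host': 'minion01.example.com',
    'index': 'hubble_nova',
    'sourcetype': 'hubble_audit',
    'event': {
        'check_result': 'Failure',
        'check_id': 'cis-1-1-1',
        'job_id': '20240101000000000000',
    },
    # Index-time extracted fields must be strings, mirroring the returner's `fields` block.
    'fields': {'aws_instance_id': 'i-0123456789abcdef0'},
}

requests.post('https://{}:{}/services/collector/event'.format(hec_host, hec_port),
              data=json.dumps(payload),
              headers={'Authorization': 'Splunk {}'.format(hec_token)},
              verify=False)  # the real wrapper handles SSL, proxy, and timeouts itself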
Example #3
def returner(ret):
    '''
    Send hubble nova audit results (Failure/Success/Compliance) to an HTTP
    endpoint (/hubble/nova) using basic auth.
    '''
    opts_list = _get_options()

    clouds = get_cloud_details()

    for opts in opts_list:
        proxy = opts['proxy']
        timeout = opts['timeout']
        custom_fields = opts['custom_fields']

        indexer = opts['indexer']
        port = opts['port']
        password = opts['password']
        user = opts['user']

        data = ret['return']
        minion_id = ret['id']
        jid = ret['jid']
        fqdn = __grains__['fqdn']
        # Sometimes fqdn is blank. If it is, replace it with minion_id
        fqdn = fqdn if fqdn else minion_id
        master = __grains__['master']
        try:
            fqdn_ip4 = __grains__['fqdn_ip4'][0]
        except IndexError:
            fqdn_ip4 = __grains__['ipv4'][0]
        if fqdn_ip4.startswith('127.'):
            for ip4_addr in __grains__['ipv4']:
                if ip4_addr and not ip4_addr.startswith('127.'):
                    fqdn_ip4 = ip4_addr
                    break

        if __grains__['master']:
            master = __grains__['master']
        else:
            # We *are* the master, so use our hostname
            master = socket.gethostname()

        if not isinstance(data, dict):
            log.error('Data sent to splunk_nova_return was not formed as a '
                      'dict:\n{0}'.format(data))
            return

        for fai in data.get('Failure', []):
            check_id = fai.keys()[0]
            payload = {}
            event = {}
            event.update({'check_result': 'Failure'})
            event.update({'check_id': check_id})
            event.update({'job_id': jid})
            if not isinstance(fai[check_id], dict):
                event.update({'description': fai[check_id]})
            elif 'description' in fai[check_id]:
                for key, value in fai[check_id].iteritems():
                    if key not in ['tag']:
                        event[key] = value
            event.update({'master': master})
            event.update({'minion_id': minion_id})
            event.update({'dest_host': fqdn})
            event.update({'dest_ip': fqdn_ip4})

            for cloud in clouds:
                event.update(cloud)

            for custom_field in custom_fields:
                custom_field_name = 'custom_' + custom_field
                custom_field_value = __salt__['config.get'](custom_field, '')
                if isinstance(custom_field_value, str):
                    event.update({custom_field_name: custom_field_value})
                elif isinstance(custom_field_value, list):
                    custom_field_value = ','.join(custom_field_value)
                    event.update({custom_field_name: custom_field_value})

            payload.update({'host': fqdn})
            payload.update({'index': opts['index']})
            payload.update({'sourcetype': opts['sourcetype']})
            payload.update({'event': event})

            rdy = json.dumps(payload)
            requests.post('{}:{}/hubble/nova'.format(indexer, port),
                          rdy,
                          auth=HTTPBasicAuth(user, password))

        for suc in data.get('Success', []):
            check_id = suc.keys()[0]
            payload = {}
            event = {}
            event.update({'check_result': 'Success'})
            event.update({'check_id': check_id})
            event.update({'job_id': jid})
            if not isinstance(suc[check_id], dict):
                event.update({'description': suc[check_id]})
            elif 'description' in suc[check_id]:
                for key, value in suc[check_id].iteritems():
                    if key not in ['tag']:
                        event[key] = value
            event.update({'master': master})
            event.update({'minion_id': minion_id})
            event.update({'dest_host': fqdn})
            event.update({'dest_ip': fqdn_ip4})

            for cloud in clouds:
                event.update(cloud)

            for custom_field in custom_fields:
                custom_field_name = 'custom_' + custom_field
                custom_field_value = __salt__['config.get'](custom_field, '')
                if isinstance(custom_field_value, str):
                    event.update({custom_field_name: custom_field_value})
                elif isinstance(custom_field_value, list):
                    custom_field_value = ','.join(custom_field_value)
                    event.update({custom_field_name: custom_field_value})

            payload.update({'host': fqdn})
            payload.update({'index': opts['index']})
            payload.update({'sourcetype': opts['sourcetype']})
            payload.update({'event': event})

            rdy = json.dumps(payload)
            requests.post('{}:{}/hubble/nova'.format(indexer, port),
                          rdy,
                          auth=HTTPBasicAuth(user, password))

        if data.get('Compliance', None):
            payload = {}
            event = {}
            event.update({'job_id': jid})
            event.update({'compliance_percentage': data['Compliance']})
            event.update({'master': master})
            event.update({'minion_id': minion_id})
            event.update({'dest_host': fqdn})
            event.update({'dest_ip': fqdn_ip4})

            for cloud in clouds:
                event.update(cloud)

            for custom_field in custom_fields:
                custom_field_name = 'custom_' + custom_field
                custom_field_value = __salt__['config.get'](custom_field, '')
                if isinstance(custom_field_value, str):
                    event.update({custom_field_name: custom_field_value})
                elif isinstance(custom_field_value, list):
                    custom_field_value = ','.join(custom_field_value)
                    event.update({custom_field_name: custom_field_value})

            payload.update({'host': fqdn})
            payload.update({'index': opts['index']})
            payload.update({'sourcetype': opts['sourcetype']})
            payload.update({'event': event})

            rdy = json.dumps(payload)
            requests.post('{}:{}/hubble/nova'.format(indexer, port),
                          rdy,
                          auth=HTTPBasicAuth(user, password))

    return
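
The Nova returner above repeats the same custom-field normalization (plain strings pass through, lists are comma-joined) in each of its Failure, Success, and Compliance blocks. A small helper could factor that out; the name and signature here are mine, not from the listing:

def _normalize_custom_fields(custom_fields, config_get):
    '''
    Build {'custom_<name>': <string>} pairs from the configured custom fields.
    `config_get` stands in for __salt__['config.get']; list values are
    comma-joined, string values pass through, and anything else is skipped,
    matching the inline logic in the returners above.
    '''
    normalized = {}
    for custom_field in custom_fields:
        value = config_get(custom_field, '')
        if isinstance(value, list):
            value = ','.join(value)
        if isinstance(value, str):
            normalized['custom_' + custom_field] = value
    return normalized

With it, each block could simply call event.update(_normalize_custom_fields(custom_fields, __salt__['config.get'])).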
Example #4
def returner(ret):
    try:
        if isinstance(ret, dict) and not ret.get('return'):
            # Empty single return, let's not do any setup or anything
            return

        opts_list = _get_options()
        # Get cloud details
        clouds = get_cloud_details()

        for opts in opts_list:
            logging.info('Options: %s' % json.dumps(opts))
            http_event_collector_key = opts['token']
            http_event_collector_host = opts['indexer']
            http_event_collector_port = opts['port']
            hec_ssl = opts['http_event_server_ssl']
            proxy = opts['proxy']
            timeout = opts['timeout']
            custom_fields = opts['custom_fields']

            # Set up the fields to be extracted at index time. The field values must be strings.
            # Note that these fields will also still be available in the event data
            index_extracted_fields = [
                'aws_instance_id', 'aws_account_id', 'azure_vmId'
            ]
            try:
                index_extracted_fields.extend(opts['index_extracted_fields'])
            except TypeError:
                pass

            # Set up the collector
            hec = http_event_collector(
                http_event_collector_key,
                http_event_collector_host,
                http_event_port=http_event_collector_port,
                http_event_server_ssl=hec_ssl,
                proxy=proxy,
                timeout=timeout)
            # Check whether or not data is batched:
            if isinstance(ret, dict):  # Batching is disabled
                data = [ret]
            else:
                data = ret
            # Sometimes there are duplicate events in the list. Dedup them:
            data = _dedupList(data)
            minion_id = __opts__['id']
            fqdn = __grains__['fqdn']
            # Sometimes fqdn is blank. If it is, replace it with minion_id
            fqdn = fqdn if fqdn else minion_id
            master = __grains__['master']
            try:
                fqdn_ip4 = __grains__['fqdn_ip4'][0]
            except IndexError:
                fqdn_ip4 = __grains__['ipv4'][0]
            if fqdn_ip4.startswith('127.'):
                for ip4_addr in __grains__['ipv4']:
                    if ip4_addr and not ip4_addr.startswith('127.'):
                        fqdn_ip4 = ip4_addr
                        break

            alerts = []
            for item in data:
                events = item['return']
                if not isinstance(events, list):
                    events = [events]
                alerts.extend(events)

            for alert in alerts:
                event = {}
                payload = {}
                if ('change' in alert):  # Linux, normal pulsar
                    # The second half of the change will be '|IN_ISDIR' for directories
                    change = alert['change'].split('|')[0]
                    # Skip the IN_IGNORED events
                    if change == 'IN_IGNORED':
                        continue
                    if len(alert['change'].split('|')) == 2:
                        object_type = 'directory'
                    else:
                        object_type = 'file'

                    actions = defaultdict(lambda: 'unknown')
                    actions['IN_ACCESS'] = 'read'
                    actions['IN_ATTRIB'] = 'acl_modified'
                    actions['IN_CLOSE_NOWRITE'] = 'read'
                    actions['IN_CLOSE_WRITE'] = 'read'
                    actions['IN_CREATE'] = 'created'
                    actions['IN_DELETE'] = 'deleted'
                    actions['IN_DELETE_SELF'] = 'deleted'
                    actions['IN_MODIFY'] = 'modified'
                    actions['IN_MOVE_SELF'] = 'modified'
                    actions['IN_MOVED_FROM'] = 'modified'
                    actions['IN_MOVED_TO'] = 'modified'
                    actions['IN_OPEN'] = 'read'
                    actions['IN_MOVE'] = 'modified'
                    actions['IN_CLOSE'] = 'read'

                    event['action'] = actions[change]
                    event['change_type'] = 'filesystem'
                    event['object_category'] = object_type
                    event['object_path'] = alert['path']
                    event['file_name'] = alert['name']
                    event['file_path'] = alert['tag']
                    event['pulsar_config'] = alert['pulsar_config']

                    # Gather more data if the change wasn't a delete
                    if alert['stats']:
                        stats = alert['stats']
                        event['object_id'] = stats['inode']
                        event['file_acl'] = stats['mode']
                        event['file_create_time'] = stats['ctime']
                        event['file_modify_time'] = stats['mtime']
                        # Convert bytes to kilobytes
                        event['file_size'] = stats['size'] / 1024.0
                        event['user'] = stats['user']
                        event['group'] = stats['group']
                        if object_type == 'file':
                            event['file_hash'] = alert['checksum']
                            event['file_hash_type'] = alert['checksum_type']

                else:  # Windows, win_pulsar
                    change = alert['Accesses']
                    if alert['Hash'] == 'Item is a directory':
                        object_type = 'directory'
                    else:
                        object_type = 'file'

                    actions = defaultdict(lambda: 'unknown')
                    actions['Delete'] = 'deleted'
                    actions['Read Control'] = 'read'
                    actions['Write DAC'] = 'acl_modified'
                    actions['Write Owner'] = 'modified'
                    actions['Synchronize'] = 'modified'
                    actions['Access Sys Sec'] = 'read'
                    actions['Read Data'] = 'read'
                    actions['Write Data'] = 'modified'
                    actions['Append Data'] = 'modified'
                    actions['Read EA'] = 'read'
                    actions['Write EA'] = 'modified'
                    actions['Execute/Traverse'] = 'read'
                    actions['Read Attributes'] = 'read'
                    actions['Write Attributes'] = 'acl_modified'
                    actions['Query Key Value'] = 'read'
                    actions['Set Key Value'] = 'modified'
                    actions['Create Sub Key'] = 'created'
                    actions['Enumerate Sub-Keys'] = 'read'
                    actions['Notify About Changes to Keys'] = 'read'
                    actions['Create Link'] = 'created'
                    actions['Print'] = 'read'

                    event['action'] = actions[change]
                    event['change_type'] = 'filesystem'
                    event['object_category'] = object_type
                    event['object_path'] = alert['Object Name']
                    event['file_name'] = os.path.basename(alert['Object Name'])
                    event['file_path'] = os.path.dirname(alert['Object Name'])
                    event['pulsar_config'] = alert['pulsar_config']
                    # TODO: Should we be reporting 'EntryType' or 'TimeGenerated'?
                    #   EntryType reports whether the attempted change was successful.

                event.update({'master': master})
                event.update({'minion_id': minion_id})
                event.update({'dest_host': fqdn})
                event.update({'dest_ip': fqdn_ip4})

                for cloud in clouds:
                    event.update(cloud)

                for custom_field in custom_fields:
                    custom_field_name = 'custom_' + custom_field
                    custom_field_value = __salt__['config.get'](custom_field,
                                                                '')
                    if isinstance(custom_field_value, str):
                        event.update({custom_field_name: custom_field_value})
                    elif isinstance(custom_field_value, list):
                        custom_field_value = ','.join(custom_field_value)
                        event.update({custom_field_name: custom_field_value})

                payload.update({'host': fqdn})
                payload.update({'index': opts['index']})
                payload.update({'sourcetype': opts['sourcetype']})
                payload.update({'event': event})

                # Potentially add metadata fields:
                fields = {}
                for item in index_extracted_fields:
                    if item in payload['event'] and not isinstance(
                            payload['event'][item], (list, dict, tuple)):
                        fields[item] = str(payload['event'][item])
                if fields:
                    payload.update({'fields': fields})

                hec.batchEvent(payload)

            hec.flushBatch()
    except Exception:
        log.exception('Error occurred in splunk_pulsar_return')
    return
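
Example #4 dedups the batched pulsar data with _dedupList(), which is not included in this listing. Since the entries are dicts (and therefore unhashable), a set-based dedup will not work; a minimal order-preserving sketch under that assumption:

def _dedupList(events):
    '''
    Drop duplicate entries while preserving order. Entries are dicts, so
    membership is checked with == rather than by hashing.
    '''
    deduped = []
    for event in events:
        if event not in deduped:
            deduped.append(event)
    return deduped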
Example #5
def returner(ret):
    opts_list = _get_options()

    # Get cloud details
    clouds = get_cloud_details()

    for opts in opts_list:
        logging.info('Options: %s' % json.dumps(opts))
        http_event_collector_key = opts['token']
        http_event_collector_host = opts['indexer']
        http_event_collector_port = opts['port']
        hec_ssl = opts['http_event_server_ssl']
        proxy = opts['proxy']
        timeout = opts['timeout']
        custom_fields = opts['custom_fields']

        # Set up the fields to be extracted at index time. The field values must be strings.
        # Note that these fields will also still be available in the event data
        index_extracted_fields = [
            'aws_instance_id', 'aws_account_id', 'azure_vmId'
        ]
        try:
            index_extracted_fields.extend(opts['index_extracted_fields'])
        except TypeError:
            pass

        # Set up the collector
        hec = http_event_collector(http_event_collector_key,
                                   http_event_collector_host,
                                   http_event_port=http_event_collector_port,
                                   http_event_server_ssl=hec_ssl,
                                   proxy=proxy,
                                   timeout=timeout)

        # st = 'salt:hubble:nova'
        data = ret['return']
        minion_id = ret['id']
        jid = ret['jid']
        master = __grains__['master']
        fqdn = __grains__['fqdn']
        # Sometimes fqdn is blank. If it is, replace it with minion_id
        fqdn = fqdn if fqdn else minion_id
        try:
            fqdn_ip4 = __grains__['fqdn_ip4'][0]
        except IndexError:
            fqdn_ip4 = __grains__['ipv4'][0]
        if fqdn_ip4.startswith('127.'):
            for ip4_addr in __grains__['ipv4']:
                if ip4_addr and not ip4_addr.startswith('127.'):
                    fqdn_ip4 = ip4_addr
                    break

        if not data:
            return
        else:
            for query in data:
                for query_name, query_results in query.iteritems():
                    for query_result in query_results['data']:
                        event = {}
                        payload = {}
                        event.update(query_result)
                        event.update({'query': query_name})
                        event.update({'job_id': jid})
                        event.update({'master': master})
                        event.update({'minion_id': minion_id})
                        event.update({'dest_host': fqdn})
                        event.update({'dest_ip': fqdn_ip4})

                        for cloud in clouds:
                            event.update(cloud)

                        for custom_field in custom_fields:
                            custom_field_name = 'custom_' + custom_field
                            custom_field_value = __salt__['config.get'](
                                custom_field, '')
                            if isinstance(custom_field_value, str):
                                event.update(
                                    {custom_field_name: custom_field_value})
                            elif isinstance(custom_field_value, list):
                                custom_field_value = ','.join(
                                    custom_field_value)
                                event.update(
                                    {custom_field_name: custom_field_value})

                        payload.update({'host': fqdn})
                        payload.update({'index': opts['index']})
                        if opts['add_query_to_sourcetype']:
                            payload.update({
                                'sourcetype':
                                "%s_%s" % (opts['sourcetype'], query_name)
                            })
                        else:
                            payload.update({'sourcetype': opts['sourcetype']})
                        payload.update({'event': event})

                        # Potentially add metadata fields:
                        fields = {}
                        for item in index_extracted_fields:
                            if item in payload['event'] and not isinstance(
                                    payload['event'][item],
                                (list, dict, tuple)):
                                fields[item] = str(payload['event'][item])
                        if fields:
                            payload.update({'fields': fields})

                        # If the osquery query includes a field called 'time' it will be checked.
                        # If it's within the last year, it will be used as the eventtime.
                        event_time = query_result.get('time', '')
                        try:
                            if (datetime.fromtimestamp(time.time()) -
                                    datetime.fromtimestamp(
                                        float(event_time))).days > 365:
                                event_time = ''
                        except:
                            event_time = ''
                        finally:
                            hec.batchEvent(payload, eventtime=event_time)

        hec.flushBatch()
    return
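
Example #5 only trusts an osquery-supplied 'time' value if it falls within the last year; otherwise the event time is left blank and the collector stamps the event at ingestion. The same check as a standalone sketch (the function name is mine):

import time
from datetime import datetime


def _recent_event_time(raw_time, max_age_days=365):
    '''
    Return the osquery-provided timestamp unchanged if it parses and is at
    most `max_age_days` old; otherwise return '' so the event collector
    assigns its own time.
    '''
    try:
        age = datetime.fromtimestamp(time.time()) - datetime.fromtimestamp(float(raw_time))
        if age.days > max_age_days:
            return ''
    except (TypeError, ValueError):
        return ''
    return raw_time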
Example #6
def returner(ret):
    '''
    Send pulsar file integrity monitoring alerts to an HTTP endpoint
    (/hubble/pulsar) using basic auth.
    '''
    if isinstance(ret, dict) and not ret.get('return'):
        return

    opts_list = _get_options()

    clouds = get_cloud_details()

    for opts in opts_list:
        proxy = opts['proxy']
        timeout = opts['timeout']
        custom_fields = opts['custom_fields']

        indexer = opts['indexer']
        port = opts['port']
        password = opts['password']
        user = opts['user']

        data = _dedupList(ret['return'])
        minion_id = __opts__['id']
        fqdn = __grains__['fqdn']
        fqdn = fqdn if fqdn else minion_id
        master = __grains__['master']
        try:
            fqdn_ip4 = __grains__['fqdn_ip4'][0]
        except IndexError:
            fqdn_ip4 = __grains__['ipv4'][0]
        if fqdn_ip4.startswith('127.'):
            for ip4_addr in __grains__['ipv4']:
                if ip4_addr and not ip4_addr.startswith('127.'):
                    fqdn_ip4 = ip4_addr
                    break

        alerts = []
        for item in data:
            events = item
            if not isinstance(events, list):
                events = [events]
            alerts.extend(events)

        for alert in alerts:
            event = {}
            payload = {}
            if ('change' in alert):  # Linux, normal pulsar
                # The second half of the change will be '|IN_ISDIR' for directories
                change = alert['change'].split('|')[0]
                # Skip the IN_IGNORED events
                if change == 'IN_IGNORED':
                    continue
                if len(alert['change'].split('|')) == 2:
                    object_type = 'directory'
                else:
                    object_type = 'file'

                actions = defaultdict(lambda: 'unknown')
                actions['IN_ACCESS'] = 'read'
                actions['IN_ATTRIB'] = 'acl_modified'
                actions['IN_CLOSE_NOWRITE'] = 'read'
                actions['IN_CLOSE_WRITE'] = 'read'
                actions['IN_CREATE'] = 'created'
                actions['IN_DELETE'] = 'deleted'
                actions['IN_DELETE_SELF'] = 'deleted'
                actions['IN_MODIFY'] = 'modified'
                actions['IN_MOVE_SELF'] = 'modified'
                actions['IN_MOVED_FROM'] = 'modified'
                actions['IN_MOVED_TO'] = 'modified'
                actions['IN_OPEN'] = 'read'
                actions['IN_MOVE'] = 'modified'
                actions['IN_CLOSE'] = 'read'

                event['action'] = actions[change]
                event['change_type'] = 'filesystem'
                event['object_category'] = object_type
                event['object_path'] = alert['path']
                event['file_name'] = alert['name']
                event['file_path'] = alert['tag']

                # Gather more data if the change wasn't a delete
                if alert['stats']:
                    stats = alert['stats']
                    event['object_id'] = stats['inode']
                    event['file_acl'] = stats['mode']
                    event['file_create_time'] = stats['ctime']
                    event['file_modify_time'] = stats['mtime']
                    # Convert bytes to kilobytes
                    event['file_size'] = stats['size'] / 1024.0
                    event['user'] = stats['user']
                    event['group'] = stats['group']
                    if object_type == 'file':
                        event['file_hash'] = alert['checksum']
                        event['file_hash_type'] = alert['checksum_type']

            else:  # Windows, win_pulsar
                change = alert['Accesses']
                if alert['Hash'] == 'Item is a directory':
                    object_type = 'directory'
                else:
                    object_type = 'file'

                actions = defaultdict(lambda: 'unknown')
                actions['Delete'] = 'deleted'
                actions['Read Control'] = 'read'
                actions['Write DAC'] = 'acl_modified'
                actions['Write Owner'] = 'modified'
                actions['Synchronize'] = 'modified'
                actions['Access Sys Sec'] = 'read'
                actions['Read Data'] = 'read'
                actions['Write Data'] = 'modified'
                actions['Append Data'] = 'modified'
                actions['Read EA'] = 'read'
                actions['Write EA'] = 'modified'
                actions['Execute/Traverse'] = 'read'
                actions['Read Attributes'] = 'read'
                actions['Write Attributes'] = 'acl_modified'
                actions['Query Key Value'] = 'read'
                actions['Set Key Value'] = 'modified'
                actions['Create Sub Key'] = 'created'
                actions['Enumerate Sub-Keys'] = 'read'
                actions['Notify About Changes to Keys'] = 'read'
                actions['Create Link'] = 'created'
                actions['Print'] = 'read'

                event['action'] = actions[change]
                event['change_type'] = 'filesystem'
                event['object_category'] = object_type
                event['object_path'] = alert['Object Name']
                event['file_name'] = os.path.basename(alert['Object Name'])
                event['file_path'] = os.path.dirname(alert['Object Name'])
                # TODO: Should we be reporting 'EntryType' or 'TimeGenerated'?
                #   EntryType reports whether the attempted change was successful.

            event.update({'master': master})
            event.update({'minion_id': minion_id})
            event.update({'dest_host': fqdn})
            event.update({'dest_ip': fqdn_ip4})

            for cloud in clouds:
                event.update(cloud)

            payload.update({'host': fqdn})
            payload.update({'index': opts['index']})
            payload.update({'sourcetype': opts['sourcetype']})
            payload.update({'event': event})

            rdy = json.dumps(payload)
            requests.post('{}:{}/hubble/pulsar'.format(indexer, port),
                          rdy,
                          auth=HTTPBasicAuth(user, password))
    return
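
The final pulsar returner builds its event from whatever keys the upstream inotify alert carries. A hypothetical Linux alert, containing only the keys this code actually reads, can be useful for exercising the mapping logic by hand (all values are made up):

sample_alert = {
    'change': 'IN_MODIFY',  # split on '|'; an '|IN_ISDIR' suffix marks a directory
    'path': '/etc/passwd',
    'name': 'passwd',
    'tag': '/etc',
    'stats': {              # empty/falsey for deletes, so the extra fields are skipped
        'inode': 131077,
        'mode': '0644',
        'ctime': 1700000000,
        'mtime': 1700000000,
        'size': 2048,       # bytes; the returner converts this to kilobytes
        'user': 'root',
        'group': 'root',
    },
    'checksum': 'd41d8cd98f00b204e9800998ecf8427e',
    'checksum_type': 'md5',
}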