예제 #1
0
파일: cli.py 프로젝트: vitalvas/parsedmarc
 def process_reports(reports_):
     """Print parsed DMARC reports and forward them to the configured
     Elasticsearch, Kafka, and Splunk HEC backends.

     Reads module-level state: ``args`` (CLI options), ``logger``,
     ``elastic``, ``splunk``, ``kafkaclient``, ``hec_client``,
     ``es_aggregate_index``, ``es_forensic_index``,
     ``kafka_aggregate_topic`` and ``kafka_forensic_topic``.

     Args:
         reports_: dict with ``"aggregate_reports"`` and
             ``"forensic_reports"`` lists of parsed reports.
     """
     output_str = "{0}\n".format(
         json.dumps(reports_, ensure_ascii=False, indent=2))
     if not args.silent:
         print(output_str)
     # Build the Kafka client once up front; leave it as None on failure so
     # the per-report loops below skip Kafka instead of raising NameError
     # (and logging a bogus "Kafka Error") for every single report.
     kafka_client = None
     if args.kafka_hosts:
         try:
             kafka_client = kafkaclient.KafkaClient(args.kafka_hosts)
         except Exception as error_:
             logger.error("Kafka Error: {0}".format(str(error_)))
     if args.save_aggregate:
         for report in reports_["aggregate_reports"]:
             try:
                 if args.elasticsearch_host:
                     elastic.save_aggregate_report_to_elasticsearch(
                         report, index=es_aggregate_index)
             except elastic.AlreadySaved as warning:
                 # Duplicate report: expected on re-runs, warn only.
                 logger.warning(str(warning))
             except elastic.ElasticsearchError as error_:
                 logger.error("Elasticsearch Error: {0}".format(
                     str(error_)))
             try:
                 # Guard on the client (not args.kafka_hosts) so a failed
                 # client construction above is skipped cleanly.
                 if kafka_client is not None:
                     kafka_client.save_aggregate_reports_to_kafka(
                         report, kafka_aggregate_topic)
             except Exception as error_:
                 logger.error("Kafka Error: {0}".format(str(error_)))
         if args.hec:
             try:
                 aggregate_reports_ = reports_["aggregate_reports"]
                 if len(aggregate_reports_) > 0:
                     hec_client.save_aggregate_reports_to_splunk(
                         aggregate_reports_)
             except splunk.SplunkError as e:
                 logger.error("Splunk HEC error: {0}".format(str(e)))
     if args.save_forensic:
         for report in reports_["forensic_reports"]:
             try:
                 if args.elasticsearch_host:
                     elastic.save_forensic_report_to_elasticsearch(
                         report, index=es_forensic_index)
             except elastic.AlreadySaved as warning:
                 logger.warning(str(warning))
             except elastic.ElasticsearchError as error_:
                 logger.error("Elasticsearch Error: {0}".format(
                     str(error_)))
             try:
                 if kafka_client is not None:
                     kafka_client.save_forensic_reports_to_kafka(
                         report, kafka_forensic_topic)
             except Exception as error_:
                 logger.error("Kafka Error: {0}".format(str(error_)))
         if args.hec:
             try:
                 forensic_reports_ = reports_["forensic_reports"]
                 if len(forensic_reports_) > 0:
                     hec_client.save_forensic_reports_to_splunk(
                         forensic_reports_)
             except splunk.SplunkError as e:
                 logger.error("Splunk HEC error: {0}".format(str(e)))
예제 #2
0
파일: cli.py 프로젝트: chysteria/parsedmarc
 def process_reports(reports_):
     """Print parsed DMARC reports and forward them to the configured
     Elasticsearch, Kafka, and Splunk HEC backends.

     Reads module-level state: ``opts`` (parsed options), ``logger``,
     ``elastic``, ``splunk``, ``kafkaclient``, ``hec_client``,
     ``kafka_aggregate_topic`` and ``kafka_forensic_topic``; uses
     ``create_default_context``/``CERT_NONE`` from :mod:`ssl`.

     Args:
         reports_: dict with ``"aggregate_reports"`` and
             ``"forensic_reports"`` lists of parsed reports.
     """
     output_str = "{0}\n".format(
         json.dumps(reports_, ensure_ascii=False, indent=2))
     if not opts.silent:
         print(output_str)
     # Build the Kafka client once up front; leave it as None on failure so
     # the per-report loops below skip Kafka instead of raising NameError
     # (and logging a bogus "Kafka Error") for every single report.
     kafka_client = None
     if opts.kafka_hosts:
         try:
             ssl_context = None
             if opts.kafka_skip_certificate_verification:
                 # Explicitly opt out of TLS verification when requested.
                 logger.debug("Skipping Kafka certificate verification")
                 ssl_context = create_default_context()
                 ssl_context.check_hostname = False
                 ssl_context.verify_mode = CERT_NONE
             kafka_client = kafkaclient.KafkaClient(
                 opts.kafka_hosts,
                 username=opts.kafka_username,
                 password=opts.kafka_password,
                 ssl_context=ssl_context)
         except Exception as error_:
             logger.error("Kafka Error: {0}".format(str(error_)))
     if opts.save_aggregate:
         for report in reports_["aggregate_reports"]:
             try:
                 if opts.elasticsearch_hosts:
                     shards = opts.elasticsearch_number_of_shards
                     replicas = opts.elasticsearch_number_of_replicas
                     elastic.save_aggregate_report_to_elasticsearch(
                         report,
                         index_suffix=opts.elasticsearch_index_suffix,
                         monthly_indexes=opts.elasticsearch_monthly_indexes,
                         number_of_shards=shards,
                         number_of_replicas=replicas)
             except elastic.AlreadySaved as warning:
                 # Duplicate report: expected on re-runs, warn only.
                 logger.warning(str(warning))
             except elastic.ElasticsearchError as error_:
                 logger.error("Elasticsearch Error: {0}".format(
                     str(error_)))
             try:
                 # Guard on the client (not opts.kafka_hosts) so a failed
                 # client construction above is skipped cleanly.
                 if kafka_client is not None:
                     kafka_client.save_aggregate_reports_to_kafka(
                         report, kafka_aggregate_topic)
             except Exception as error_:
                 logger.error("Kafka Error: {0}".format(str(error_)))
         if opts.hec:
             try:
                 aggregate_reports_ = reports_["aggregate_reports"]
                 if len(aggregate_reports_) > 0:
                     hec_client.save_aggregate_reports_to_splunk(
                         aggregate_reports_)
             except splunk.SplunkError as e:
                 logger.error("Splunk HEC error: {0}".format(str(e)))
     if opts.save_forensic:
         for report in reports_["forensic_reports"]:
             try:
                 if opts.elasticsearch_hosts:
                     # Only read the shard/replica settings when ES is
                     # actually enabled (was computed unconditionally).
                     shards = opts.elasticsearch_number_of_shards
                     replicas = opts.elasticsearch_number_of_replicas
                     elastic.save_forensic_report_to_elasticsearch(
                         report,
                         index_suffix=opts.elasticsearch_index_suffix,
                         monthly_indexes=opts.elasticsearch_monthly_indexes,
                         number_of_shards=shards,
                         number_of_replicas=replicas)
             except elastic.AlreadySaved as warning:
                 logger.warning(str(warning))
             except elastic.ElasticsearchError as error_:
                 logger.error("Elasticsearch Error: {0}".format(
                     str(error_)))
             except InvalidDMARCReport as error_:
                 logger.error(str(error_))
             try:
                 if kafka_client is not None:
                     kafka_client.save_forensic_reports_to_kafka(
                         report, kafka_forensic_topic)
             except Exception as error_:
                 logger.error("Kafka Error: {0}".format(str(error_)))
         if opts.hec:
             try:
                 forensic_reports_ = reports_["forensic_reports"]
                 if len(forensic_reports_) > 0:
                     hec_client.save_forensic_reports_to_splunk(
                         forensic_reports_)
             except splunk.SplunkError as e:
                 logger.error("Splunk HEC error: {0}".format(str(e)))
예제 #3
0
 def process_reports(reports_):
     """Print parsed DMARC reports and forward them to the configured
     Elasticsearch, Kafka, and Splunk HEC backends.

     Reads module-level state: ``opts`` (parsed options), ``logger``,
     ``elastic``, ``splunk``, ``kafkaclient``, ``hec_client``,
     ``kafka_aggregate_topic`` and ``kafka_forensic_topic``.

     Args:
         reports_: dict with ``"aggregate_reports"`` and
             ``"forensic_reports"`` lists of parsed reports.
     """
     output_str = "{0}\n".format(
         json.dumps(reports_, ensure_ascii=False, indent=2))
     if not opts.silent:
         print(output_str)
     # Build the Kafka client once up front; leave it as None on failure so
     # the per-report loops below skip Kafka instead of raising NameError
     # (and logging a bogus "Kafka Error") for every single report.
     kafka_client = None
     if opts.kafka_hosts:
         try:
             kafka_client = kafkaclient.KafkaClient(
                 opts.kafka_hosts,
                 username=opts.kafka_username,
                 password=opts.kafka_password)
         except Exception as error_:
             logger.error("Kafka Error: {0}".format(str(error_)))
     if opts.save_aggregate:
         for report in reports_["aggregate_reports"]:
             try:
                 if opts.elasticsearch_hosts:
                     elastic.save_aggregate_report_to_elasticsearch(
                         report,
                         index_suffix=opts.elasticsearch_index_suffix,
                         monthly_indexes=opts.elasticsearch_monthly_indexes)
             except elastic.AlreadySaved as warning:
                 # Duplicate report: expected on re-runs, warn only.
                 logger.warning(str(warning))
             except elastic.ElasticsearchError as error_:
                 logger.error("Elasticsearch Error: {0}".format(
                     str(error_)))
             try:
                 # Guard on the client (not opts.kafka_hosts) so a failed
                 # client construction above is skipped cleanly.
                 if kafka_client is not None:
                     kafka_client.save_aggregate_reports_to_kafka(
                         report, kafka_aggregate_topic)
             except Exception as error_:
                 logger.error("Kafka Error: {0}".format(str(error_)))
         if opts.hec:
             try:
                 aggregate_reports_ = reports_["aggregate_reports"]
                 if len(aggregate_reports_) > 0:
                     hec_client.save_aggregate_reports_to_splunk(
                         aggregate_reports_)
             except splunk.SplunkError as e:
                 logger.error("Splunk HEC error: {0}".format(str(e)))
     if opts.save_forensic:
         for report in reports_["forensic_reports"]:
             try:
                 if opts.elasticsearch_hosts:
                     elastic.save_forensic_report_to_elasticsearch(
                         report,
                         index_suffix=opts.elasticsearch_index_suffix,
                         monthly_indexes=opts.elasticsearch_monthly_indexes)
             except elastic.AlreadySaved as warning:
                 logger.warning(str(warning))
             except elastic.ElasticsearchError as error_:
                 logger.error("Elasticsearch Error: {0}".format(
                     str(error_)))
             except InvalidDMARCReport as error_:
                 logger.error(str(error_))
             try:
                 if kafka_client is not None:
                     kafka_client.save_forensic_reports_to_kafka(
                         report, kafka_forensic_topic)
             except Exception as error_:
                 logger.error("Kafka Error: {0}".format(str(error_)))
         if opts.hec:
             try:
                 forensic_reports_ = reports_["forensic_reports"]
                 if len(forensic_reports_) > 0:
                     hec_client.save_forensic_reports_to_splunk(
                         forensic_reports_)
             except splunk.SplunkError as e:
                 logger.error("Splunk HEC error: {0}".format(str(e)))