def test_get_metrics_from_finding(mocker):

    expected_response = {
        'generator_id':
        'arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.3',
        'type':
        '1.3 Ensure credentials unused for 90 days or greater are disabled',
        'productArn':
        'arn:aws:securityhub:' + my_region + '::product/aws/securityhub',
        'finding_triggered_by': 'unit-test',
        'region': mocker.ANY
    }

    finding = utils.load_test_data(test_data + 'cis_1-3-iamuser1.json',
                                   my_region).get('detail').get('findings')[0]

    ssmc = boto3.client('ssm', region_name=my_region)
    ssmc_s = Stubber(ssmc)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_uuid)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_version)
    ssmc_s.activate()

    mocker.patch('lib.metrics.Metrics.connect_to_ssm', return_value=ssmc)

    metrics = Metrics({"detail-type": "unit-test"})

    assert metrics.get_metrics_from_finding(finding) == expected_response
Example #2
def send_execution_data(self):
    try:
        self.logger.info("Executing: " + self.__class__.__name__ + "/" + inspect.stack()[0][3])
        send = Metrics(self.logger)
        data = {"StateMachineExecutionCount": "1"}
        send.metrics(data)
        return self.event
    except Exception:
        # Metrics are best-effort; a failure here must not break the workflow
        return self.event
Example #3
def lambda_handler(event, context):

    LOGGER.debug(event)
    metrics = Metrics(event)
    try:
        for finding_rec in event['detail']['findings']:
            finding = Finding(finding_rec)
            remediate(finding, metrics.get_metrics_from_finding(finding_rec))
    except Exception as e:
        LOGGER.error(e)

    APPLOGGER.flush()  # flush the buffer to CW Logs
Example #4
def __init__(self, logger, sm_input_list):
    self.logger = logger
    self.sm_input_list = sm_input_list
    self.list_sm_exec_arns = []
    self.stack_set_exist = True
    self.solution_metrics = Metrics(logger)
    self.param_handler = CFNParamsHandler(logger)
    self.state_machine = StateMachine(logger)
    self.stack_set = StackSet(logger)
    self.s3 = S3(logger)
    self.wait_time = os.environ.get('WAIT_TIME')
    self.execution_mode = os.environ.get('EXECUTION_MODE')
def test_metrics_construction(mocker):

    ssmc = boto3.client('ssm', region_name=my_region)
    ssmc_s = Stubber(ssmc)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_uuid)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_version)
    ssmc_s.activate()

    mocker.patch('lib.metrics.Metrics.connect_to_ssm', return_value=ssmc)

    metrics = Metrics({"detail-type": "unit-test"})

    assert metrics.solution_uuid == "12345678-1234-1234-1234-123412341234"
    assert metrics.solution_version == "v1.2.0TEST"
def send_tgw_peering_anonymous_data(self):
    final_states = ['available', 'deleted']
    if self.event.get('TgwPeeringAttachmentState') in final_states:
        send = Metrics(self.logger)
        data = {
            "TgwPeeringState": self.event.get('TgwPeeringAttachmentState'),
            "Region": environ.get('AWS_REGION'),
            "PeerRegion": self.event.get('PeerRegion'),
            "RequestType": self.event.get('RequestType'),
            "TagEventSource": "TransitGateway",
            "SolutionVersion": environ.get('SOLUTION_VERSION')
        }
        return send.metrics(data)
    else:
        return None
def __init__(self, logger, wait_time, manifest_file_path, sm_arn_scp,
             staging_bucket):
    self.state_machine = StateMachine(logger)
    self.s3 = S3(logger)
    self.send = Metrics(logger)
    self.param_handler = ParamsHandler(logger)
    self.logger = logger
    self.manifest_file_path = manifest_file_path
    self.manifest_folder = manifest_file_path[:-len(MANIFEST_FILE_NAME)]
    self.wait_time = wait_time
    self.sm_arn_scp = sm_arn_scp
    self.manifest = None
    self.list_sm_exec_arns = []
    self.nested_ou_delimiter = ""
    self.staging_bucket = staging_bucket
    self.root_id = None
Example #8
def metrics(self):
    text = None
    predicted = None

    if self.mode != "train":
        text = self.batch.text
        predicted = self.decoded_inferred_texts

    return Metrics(
        self.mode,
        self.loss,
        self.model_loss,
        self.fooling_loss,
        text,
        predicted
    )
Example #9
def send_metrics(self):
    try:
        self.put_ssm()
        self.logger.info(self.params)
        data = {
            "PrincipalType": self.params.get('PrincipalType'),
            "ApprovalNotificationFlag": self.params.get('ApprovalNotification'),
            "AuditTrailRetentionPeriod": self.params.get('AuditTrailRetentionPeriod'),
            "DefaultRoute": self.params.get('DefaultRoute'),
            "Region": get_region(),
            "SolutionVersion": self.params.get('SolutionVersion'),
            "CreatedNewTransitGateway": self.params.get(
                'CreatedNewTransitGateway')
        }
        send = Metrics(self.logger)
        send.metrics(data)
    except Exception as e:
        self.logger.info(e)
Example #10
def __init__(self, logger, sm_arns_map, staging_bucket, manifest_file_path,
             pipeline_stage, token, execution_mode, primary_account_id):
    self.state_machine = StateMachine(logger)
    self.ssm = SSM(logger)
    self.s3 = S3(logger)
    self.send = Metrics(logger)
    self.param_handler = ParamsHandler(logger)
    self.logger = logger
    self.sm_arns_map = sm_arns_map
    self.manifest = None
    self.staging_bucket = staging_bucket
    self.manifest_file_path = manifest_file_path
    self.token = token
    self.pipeline_stage = pipeline_stage
    self.manifest_folder = manifest_file_path[:-len(MANIFEST_FILE_NAME)]
    self.isSequential = execution_mode.lower() == 'sequential'
    self.index = 100
    self.primary_account_id = primary_account_id
def test_send_metrics(mocker):

    expected_response = {
        'Solution': 'SO0111',
        'UUID': '12345678-1234-1234-1234-123412341234',
        'TimeStamp': mocker.ANY,
        'Data': {
            'generator_id':
            'arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.3',
            'type':
            '1.3 Ensure credentials unused for 90 days or greater are disabled',
            'productArn': mocker.ANY,
            'finding_triggered_by': 'unit-test',
            'region': mocker.ANY
        },
        'Version': 'v1.2.0TEST'
    }

    os.environ['sendAnonymousMetrics'] = 'Yes'

    finding = utils.load_test_data(test_data + 'cis_1-3-iamuser1.json',
                                   my_region).get('detail').get('findings')[0]

    ssmc = boto3.client('ssm', region_name=my_region)
    ssmc_s = Stubber(ssmc)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_uuid)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_version)
    ssmc_s.activate()

    mocker.patch('lib.metrics.Metrics.connect_to_ssm', return_value=ssmc)

    metrics = Metrics({"detail-type": "unit-test"})
    metrics_data = metrics.get_metrics_from_finding(finding)

    send_metrics = mocker.patch('lib.metrics.Metrics.post_metrics_to_api',
                                return_value=None)

    metrics.send_metrics(metrics_data)

    send_metrics.assert_called_with(expected_response)
def __init__(self, logger, wait_time, manifest_file_path, sm_arn_stackset, staging_bucket, execution_mode):
    self.state_machine = StateMachine(logger)
    self.ssm = SSM(logger)
    self.s3 = S3(logger)
    self.send = Metrics(logger)
    self.param_handler = ParamsHandler(logger)
    self.logger = logger
    self.manifest_file_path = manifest_file_path
    self.manifest_folder = manifest_file_path[:-len(MANIFEST_FILE_NAME)]
    self.wait_time = wait_time
    self.sm_arn_stackset = sm_arn_stackset
    self.manifest = None
    self.list_sm_exec_arns = []
    self.staging_bucket = staging_bucket
    self.root_id = None
    self.uuid = uuid4()
    self.state_machine_event = {}
    self.logger.info("Running {} mode".format(execution_mode))
    self.sequential_flag = execution_mode.lower() == 'sequential'
Example #13
        cam.take_photo()
        time.sleep(UPDATE_PHOTO_INTERVAL)


if __name__ == '__main__':
    logging.basicConfig(format='%(asctime)s %(message)s', level=LOG_LEVEL)

    logging.info('Starting...')

    cam = Camera()
    prop = Property()
    relays = Relays()
    triac_hat = TriacHat()
    sensors = Sensors()
    growing = Growing()
    metrics = Metrics()
    fan = Fan()
    light = Light()
    humidify = Humidify()
    weather = Weather()

    # Init start settings
    fan.init(triac_hat)

    start_prometheus_exporter(EXPORTER_SERVER_PORT)
    logging.debug('Prometheus exporter listen on 0.0.0.0:{port}'.format(port=EXPORTER_SERVER_PORT))

    update_metrics()
    light_control()
    fan_control()
    humidify_control()
Example #14
from lib.metrics import Metrics
from lib.logger import Logger
from decimal import Decimal

log_level = 'info'
logger = Logger(loglevel=log_level)

send = Metrics(logger)


def test_backend_metrics():
    solution_id = 'SO_unit_test'
    data = {'key_string1': '2018-06-15',
            'key_string2': 'A1B2',
            'decimal': Decimal('5')
            }
    url = 'https://oszclq8tyh.execute-api.us-east-1.amazonaws.com/prod/generic'
    response = send.metrics(solution_id, data, url)
    logger.info(response)
    assert response == 200
Example #15
def notify(finding, message, logger, cwlogs=False, sechub=True, sns=False):
    """
    Consolidates several outputs to a single call.

    Parameters
    ----------
    finding: finding object for which notification is to be done
    message: dict of notification data:
        {
            'Account': string,
            'AffectedObject': string,
            'Remediation': string,
            'State': string,
            'Note': string
        }
    logger: logger object for logging to stdout
    cwlogs: application log wrapper (or False) - log to the application log group?
    sechub: boolean - update Security Hub notes on the finding?
    sns: SNS wrapper (or False) - publish to the SNS topic?
    """

    remediation_adj = ''
    if 'State' in message:
        if message['State'] == 'RESOLVED':
            remediation_adj = 'remediation was successful'
        elif message['State'] == 'INITIAL':
            remediation_adj = 'remediation started'
        elif message['State'] == 'FAILED':
            remediation_adj = 'remediation failed. Please remediate manually'
        if 'Note' not in message or not message['Note']:
            message['Note'] = '"' + message.get('Remediation', 'error missing remediation') +\
            '" ' + remediation_adj
    else:
        message['State'] = 'INFO'

    if 'Note' not in message or not message['Note']:
        message['Note'] = 'error - missing note'

    # send metrics
    try:
        metrics_data = message['metrics_data']
        metrics = Metrics({'detail-type': 'None'})
        metrics_data['status'] = message['State']
        metrics.send_metrics(metrics_data)
    except Exception as e:
        logger.error(e)
        logger.error('Failed to send metrics')

    # lambda logs - always
    logger.info(
        message.get('State', 'INFO') + ': ' + message.get('Note') +\
        ', Account Id: ' + message.get('Account', 'error') + \
        ', Resource: ' + message.get('AffectedObject', 'error')
    )

    # log to application log
    if cwlogs:
        # to take advantage of buffering, the caller controls the
        # connection.
        cwlogs.add_message(
            message.get('State') + ': ' + message.get('Note') +\
            ', Account Id: ' + message.get('Account', 'error') + \
            ', Resource: ' + message.get('AffectedObject', 'error')
        )

    if sechub:
        if message.get('State') == 'RESOLVED':
            finding.resolve(message.get('State') + ': ' + message.get('Note'))
        elif message.get('State') == 'INITIAL':
            finding.flag(message.get('State') + ': ' + message.get('Note'))
        else:
            finding.update_text(
                message.get('State', 'INFO') + ': ' + message.get('Note'))

    if sns:
        try:
            sns.postit('SO0111-SHARR_Topic', message, AWS_REGION)
        except Exception as e:
            logger.error(e)
            logger.error('Unable to send to sns')
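As a rough usage sketch (not part of the original sources), the call below shows how notify might be driven from a remediation handler: the message dict follows the shape described in the docstring, and the finding, LOGGER, applogger, and metrics_data names are assumed stand-ins for objects created elsewhere in the Lambda.

# Hypothetical call site for notify(); finding, LOGGER, applogger and
# metrics_data are assumed to be created by the surrounding Lambda handler.
message = {
    'Account': '111122223333',
    'AffectedObject': 'IAM user: example-user',
    'Remediation': 'Deactivate credentials unused for 90 days or more',
    'State': 'RESOLVED',
    'metrics_data': metrics_data
}
notify(finding, message, LOGGER, cwlogs=applogger, sechub=True, sns=False)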
Example #16
import logging
from dateutil import parser
from flask import Flask, abort, jsonify, request, send_file

from lib.fan import Fan
from lib.humidify import Humidify
from lib.metrics import Metrics
from lib.growing import Growing
from flask_cors import CORS

from lib.properties import Property

from settings import PHOTOS_DIR

m = Metrics()
g = Growing()
p = Property()
app = Flask(__name__)
cors = CORS(app)
fan = Fan()
h = Humidify()


@app.errorhandler(404)
def resource_not_found(e):
    return jsonify(error=str(e)), 404


@app.route('/api/metrics/<metric>')
def metrics(metric):