def retrieve_dynamo_as_config(env_vars: dict):
    region = env_vars.get("region")
    table_name = env_vars.get("table_name")

    # If testing, use the local database connection; otherwise use the default (config.DB_CONN_SERVERLESS)
    test_run = config.is_test_run()
    db_conn = config.DB_CONN_LOCAL if test_run else config.DB_CONN_SERVERLESS
    dynamodb = automated.dynamodb.DynamoDB(region=region, table_name=table_name, db_conn=db_conn)

    response = dynamodb.retrieve_all_items_from_dynamo()
    converted_json = data.convert_dynamo_json_to_py_data(response)

    # TODO: Currently this outputs it as a JSON compatible HTTP response.
    #  Consider outputting to S3 Object (watch out for infinite loop on S3 PutObject).

    if test_run:
        # Output to file so we can test locally and see the response JSON
        data.write_json_to_file(converted_json, use_pretty_json=True)

    # TODO: Add error checking before sending good response

    return http_response.construct_http_response(
        status_code=http_response.OK,
        message=str(converted_json)
    )
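# Hedged sketch for the S3-output TODO above; not part of the project, and the
# function name and parameters are hypothetical. The guard shows one way to
# avoid the infinite loop the TODO warns about: skip the write when the Lambda
# was itself triggered by a PutObject event for the output object.
import json

import boto3


def write_config_to_s3(converted_json, bucket: str, output_key: str,
                       source_key: str = None) -> None:
    if source_key == output_key:
        # Re-writing the object that triggered us would re-invoke this Lambda
        return
    boto3.client("s3").put_object(Bucket=bucket,
                                  Key=output_key,
                                  Body=json.dumps(converted_json).encode("utf-8"))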
Example #2
    def __init__(self, region: str, ec2_conn: str = config.EC2_CONN_DEFAULT):
        """
        :param region: str - AWS region for connection endpoint
        :param ec2_conn: str - EC2 connection endpoint. Used for mock testing methods
        """
        self._region = region
        self.__ec2_conn = ec2_conn
        self.__ec2 = client("ec2", region_name=self._region)
        self._test_run = config.is_test_run()
        self.__errors = []
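# Hedged sketch only: ec2_conn is stored above but never passed to client().
# Mirroring the DynamoDB constructor in Example #4, a test endpoint (e.g. a
# LocalStack or moto server on http://localhost:4566) could be wired in like
# this; the helper name is hypothetical, not the project's code.
from boto3 import client


def make_ec2_client(region: str, test_run: bool, local_endpoint: str = None):
    if test_run and local_endpoint:
        # Point boto3 at the mock endpoint instead of the real AWS API
        return client("ec2", region_name=region, endpoint_url=local_endpoint)
    return client("ec2", region_name=region)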
Example #3
def put_config_into_dynamo(env_vars) -> dict:
    """
    Puts a JSON config of schedule and period items into DynamoDB
    :param env_vars: Environment variables retrieved from Lambda
    :return: dict - HTTP response describing the outcome of the load
    """

    # received_event_bucket = event['requestParameters']['bucketName']
    # received_event_key = event['requestParameters']['Key']

    region: str = env_vars.get("region")
    table_name: str = env_vars.get("table_name")

    test_run: bool = config.is_test_run()

    if test_run:
        s3 = automated.s3.S3(s3_conn=config.S3_CONN_LOCAL)
        dynamodb = automated.dynamodb.DynamoDB(region=region,
                                               table_name=table_name,
                                               db_conn=config.DB_CONN_LOCAL)
    else:
        s3 = automated.s3.S3(s3_conn=config.S3_CONN_DEFAULT)
        dynamodb = automated.dynamodb.DynamoDB(
            region=region,
            table_name=table_name,
            db_conn=config.DB_CONN_SERVERLESS)

    s3_object_data: str = s3.retrieve_data_from_s3_object()
    validated_json: list = data.validate_json(s3_object_data)
    converted_json: list = data.convert_json_to_dynamo_json(validated_json)

    logger.info(
        "Config JSON to DynamoDB-compatible JSON conversion successful")

    response: dict = dynamodb.load_json_into_db(converted_json)
    response_status_code: int = response.get('ResponseMetadata').get(
        'HTTPStatusCode')

    # UnprocessedItems is a dict keyed by table name, not a list, so check its
    # truthiness rather than its length.
    # TODO: Implement logic to retry batch writes for unprocessed items
    unprocessed_items: dict = response.get('UnprocessedItems')
    if unprocessed_items:
        logger.info("Found unprocessed items; retry logic is not yet implemented")
        logger.info(f"Unprocessed Items: {unprocessed_items}")
        return http_response.construct_http_response(
            status_code=response_status_code, message=str(response))

    if response_status_code == http_response.OK:
        logger.info("Successfully loaded items into DynamoDB")
        return http_response.construct_http_response(
            status_code=http_response.OK,
            message=f"Success from '{events.type.API_S3_PUT_CONFIG}'")

    return http_response.construct_http_response(
        status_code=response_status_code, message=str(response))
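# Hedged sketch for the unprocessed-items TODO above: re-submit
# UnprocessedItems with exponential backoff, the pattern the DynamoDB docs
# recommend for batch writes. The function name and backoff values are
# assumptions, not the project's implementation.
import time


def retry_unprocessed_items(dynamodb_client, unprocessed: dict,
                            max_attempts: int = 5) -> dict:
    attempt = 0
    while unprocessed and attempt < max_attempts:
        time.sleep(0.1 * (2 ** attempt))  # back off: 0.1s, 0.2s, 0.4s, ...
        result = dynamodb_client.batch_write_item(RequestItems=unprocessed)
        unprocessed = result.get("UnprocessedItems", {})
        attempt += 1
    return unprocessed  # empty when every item was eventually written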
Example #4
    def __init__(self, region, table_name, db_conn=config.DB_CONN_SERVERLESS):
        # Note: db_conn is not used below; the local endpoint is read from
        # config.DB_CONN_LOCAL_ENDPOINT directly.
        self.__region = region
        self.__table_name = table_name
        self.__testing = config.is_test_run()
        self.__errors = []

        if self.__testing:
            logger.warning("[TESTING] Using local database connection.")
            self.dynamodb = client("dynamodb",
                                   region_name=self.__region,
                                   endpoint_url=config.DB_CONN_LOCAL_ENDPOINT)
            self.__testing_create_table()

        else:
            self.dynamodb = client(
                "dynamodb",
                region_name=self.__region,
            )
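# Hedged sketch of what the __testing_create_table helper above might do
# against DynamoDB Local. The single "name" hash key is an assumption; the
# real key schema is not shown in this example.
def testing_create_table(dynamodb_client, table_name: str) -> None:
    if table_name in dynamodb_client.list_tables().get("TableNames", []):
        return  # table already exists from an earlier local run
    dynamodb_client.create_table(
        TableName=table_name,
        KeySchema=[{"AttributeName": "name", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "name", "AttributeType": "S"}],
        BillingMode="PAY_PER_REQUEST")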
Example #5
    def __init__(self, s3_conn=config.S3_CONN_DEFAULT):
        """

        """
        self.__s3_conn = s3_conn
        self.__testing = config.is_test_run()
        self._s3 = resource('s3')
        self.__errors = []

        if self.__testing:
            logger.warning(f"[TESTING] Using mock S3 connection")

        try:
            self.__bucket = os.environ['scheduler_bucket_name']
            logger.debug(
                f"Discovered S3 bucket name <{self.__bucket}> from environment variable 'scheduler_bucket_name'"
            )
        except KeyError:
            automated.exceptions.log_error(
                automation_component=self,
                error_message="Error: No bucket name found. "
                              "Please set 'scheduler_bucket_name' environment variable.",
                output_to_logger=True,
                include_in_http_response=True,
                fatal_error=True)

        try:
            self.__s3_config_object_key = os.environ[
                'scheduler_s3_config_object_key']
            logger.debug(
                f"Discovered S3 object key name [{self.__s3_config_object_key}] from environment variable "
                f"'scheduler_s3_config_object_key'")
        except KeyError:
            automated.exceptions.log_error(
                automation_component=self,
                error_message="Error: No object key found. "
                              "Please set 'scheduler_s3_config_object_key' env variable.",
                output_to_logger=True,
                include_in_http_response=True,
                fatal_error=True)
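# Hedged sketch of the retrieve_data_from_s3_object call used in Example #3,
# written with the boto3 resource API this class holds. Shown standalone; the
# real method presumably reads self.__bucket and the config object key instead
# of taking parameters.
from boto3 import resource


def retrieve_data_from_s3_object(bucket_name: str, object_key: str) -> str:
    obj = resource("s3").Object(bucket_name, object_key)
    # get() returns a dict whose 'Body' is a streaming body; read and decode it
    return obj.get()["Body"].read().decode("utf-8")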
Example #6
    def __init__(self, env_vars):
        self._region: str = env_vars.get("region")
        self._tag_key: str = env_vars.get("tag_key")
        self._table_name: str = env_vars.get("table_name")
        self._test_run: bool = config.is_test_run()
        self.__errors: list = []

        if self._test_run:
            self.__ec2: automated.ec2.EC2 = automated.ec2.EC2(
                region=self._region, ec2_conn=config.EC2_CONN_LOCAL)
            self.__dynamo_db = automated.dynamodb.DynamoDB(
                region=self._region,
                table_name=self._table_name,
                db_conn=config.DB_CONN_LOCAL)

        else:
            self.__ec2 = automated.ec2.EC2(region=self._region,
                                           ec2_conn=config.EC2_CONN_DEFAULT)
            self.__dynamo_db = automated.dynamodb.DynamoDB(
                region=self._region,
                table_name=self._table_name,
                db_conn=config.DB_CONN_SERVERLESS)

        self.__evaluator = util.evalperiod.EvalPeriod()


def event_handler(event: dict, context: dict) -> dict:
    '''
    TODO: Consider making all ec2 start/stop calls a single batch API call rather than individual start/stop
        The downside to this is fatal errors and timeouts will have to be considered more, or all actions will fail
    TODO: send JSON payload directly via API GW for placing config to DynamoDB instead of uploading config to S3
    CONSIDER: API call for check mode to output what actions would happen to what resource over day or time.
            Would give feedback to check if scheduling behavior will work as intended.
    '''

    automated_event_handler = util.eventhandler.AutomationEventHandler(event, context)
    http_response: dict = automated_event_handler.evaluate_event()
    log_http_response(http_response=http_response)

    return http_response


def log_http_response(http_response: dict) -> None:
    if config.USE_PRETTY_JSON:
        logger.info(f"Response:\n{util.data.human_readable_json(http_response)}")
    else:
        logger.info(f"Response:\n{util.data.machine_readable_json(http_response)}")


# TESTING: For local testing. Launch point.
if __name__ == '__main__':
    if config.is_test_run():
        event_handler({'detail-type': config.TESTING_EVENT}, {})
    else:
        event_handler({'detail-type': 'Scheduled Event'}, {})
        # event_handler({'detail-type': 'AWS API Call via CloudTrail', 'detail': {'eventName': 'PutObject'}}, {})
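# Hedged sketch of the construct_http_response helper used throughout these
# examples, assuming the standard API Gateway proxy response shape; the real
# http_response module may differ.
import json

OK = 200  # stand-in for the http_response.OK constant referenced above


def construct_http_response(status_code: int, message: str) -> dict:
    return {
        "statusCode": status_code,
        "headers": {"Content-Type": "application/json"},
        "body": json.dumps({"message": message}),
    }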
Example #7
    def __init__(self, event: dict, context: dict):
        self.__event: dict = event
        self.__context: dict = context
        self.__errors: list = []
        self._test_run: bool = config.is_test_run()