    def test_all_private_ips_and_shards_matching_clickhouse_server_added_to_zookeeper(
        self,
        mock_get_ec2_client,
        mock_get_zookeeper_client,
        mock_get_clickhouse_cluster_definition,
    ):
        ec2_client = MagicMock()
        mock_get_ec2_client.return_value = ec2_client
        shard_config = {
            "shard_1": ["172.26.39.30", "172.26.99.237"],
            "shard_2": ["172.26.32.16", "172.26.97.29"],
        }
        mock_get_clickhouse_cluster_definition.return_value = shard_config
        zookeeper = MagicMock()
        mock_get_zookeeper_client.return_value = zookeeper
        lambda_context = LambdaContext()
        lambda_context.function_name = "lambda_handler"
        lambda_context.aws_request_id = "abc-123"

        result = lambda_handler({}, lambda_context)

        remote_servers_xml = b"""<hmrc_data_cluster>
  <shard>
    <internal_replication>true</internal_replication>
    <replica>
      <default_database>graphite</default_database>
      <host>172.26.39.30</host>
      <port>9000</port>
    </replica>
    <replica>
      <default_database>graphite</default_database>
      <host>172.26.99.237</host>
      <port>9000</port>
    </replica>
  </shard>
  <shard>
    <internal_replication>true</internal_replication>
    <replica>
      <default_database>graphite</default_database>
      <host>172.26.32.16</host>
      <port>9000</port>
    </replica>
    <replica>
      <default_database>graphite</default_database>
      <host>172.26.97.29</host>
      <port>9000</port>
    </replica>
  </shard>
</hmrc_data_cluster>"""

        zookeeper.set.assert_any_call("clickhouse.config.remote_servers",
                                      remote_servers_xml)
        self.assertEqual(result, {"cluster_definition": shard_config})
def mock_context():
    context = LambdaContext()
    context.function_name = 'test'
    context.function_version = 'test'
    context.invoked_function_arn = 'test'
    context.memory_limit_in_mb = 'test'
    context.aws_request_id = 'test'
    context.log_group_name = 'test'
    context.log_stream_name = 'test'
    return context
    def test_should_ensure_zookeeper_path_exists_for_remote_servers(
        self,
        mock_get_ec2_client,
        mock_get_zookeeper_client,
        mock_get_clickhouse_cluster_definition,
    ):
        zookeeper = MagicMock()
        mock_get_zookeeper_client.return_value = zookeeper
        lambda_context = LambdaContext()
        lambda_context.function_name = "lambda_handler"
        lambda_context.aws_request_id = "abc-123"

        lambda_handler({}, lambda_context)

        zookeeper.ensure_path.assert_any_call(
            "clickhouse.config.remote_servers")
Example #4
def test_start_machine_handler(monkeypatch, mocker):
    step_machine = Mock()
    step_machine.list_state_machines.return_value = {
        "stateMachines": [{
            "name": "Rage Against The Machine",
            "stateMachineArn": "arn::killing.in.the.name",
        }]
    }
    step_machine.start_execution.return_value = "Lights out! Guerilla Radio!"
    mocker.patch.object(step_functions.boto3,
                        "client",
                        return_value=step_machine)
    monkeypatch.setenv("STEP_MACHINE_NAME", "Rage Against The Machine")

    payload = json.dumps({"Bulls": "On Parade"})
    event = {"body": payload}
    response = start_machine(event, LambdaContext())

    assert response == {
        "statusCode": 200,
        "body": '{"response": "Lights out! Guerilla Radio!"}',
        "headers": {
            "Access-Control-Allow-Origin": "*"
        },
    }

    step_machine.start_execution.assert_called_once_with(
        input=payload, stateMachineArn="arn::killing.in.the.name")
Example #5
def test_that_the_lambda_handler_succeeds_with_context(sns_event):
    lambda_context = LambdaContext()
    lambda_context.function_name = 'lambda_handler'
    lambda_context.aws_request_id = 'abc-123'

    ecs_client = boto3.client('ecs')

    ecs_cluster_name = get_ecs_cluster_name()
    riemann_consumer_ecs_service_name = get_riemann_consumer_ecs_service_name()
    ecs_client.create_cluster(clusterName=ecs_cluster_name)
    ecs_client.create_service(cluster=ecs_cluster_name,
                              serviceName=riemann_consumer_ecs_service_name)

    response = lambda_handler(event=sns_event, context=lambda_context)

    assert response['success'] is True
    assert response['serviceName'] == riemann_consumer_ecs_service_name
    assert response['status'] == "ACTIVE"
Example #6
def keepalive_fn(scheduler: sched.scheduler, params: inputs.Inputs,
                 context: LambdaContext, keepalive_state: KeepaliveState,
                 cache: Cache):
    ''' Each iteration of the keepalive thread runs this code. It schedules the next iteration before exiting
    so the keepalive thread keeps running; otherwise keepalives stop. '''
    try:
        update_keepalive(params, keepalive_state, cache)
        keepalive_fn.num_keepalives += 1
        if keepalive_fn.num_keepalives % defaults.KEEPALIVE_PRINT_EVERY == 0:
            print("keepalive_fn: keepalive #{}: state={}".format(
                keepalive_fn.num_keepalives, keepalive_state))

        # if invoked as a Lambda (not CLI), retrigger the backing job if this invocation will expire soon
        if (context.invoked_function_arn and
                context.get_remaining_time_in_millis() < defaults.RETRIGGER_BEFORE_EXPIRY_MS):
            cache_keys = keepalive_state.cache_keys
            lastaccess_ms = int(cache.get(cache_keys.lastaccess))
            lastaccess_age_ms = utils.millitime() - lastaccess_ms

            if lastaccess_age_ms > (defaults.BACKING_JOB_LIFETIME_MS * 0.9):
                # There were no recent calls to fetch the data produced by this backing job. No need to re-issue
                print(
                    "Exiting backing job by ending keepalive thread. lastaccess_age_ms = ",
                    lastaccess_age_ms)
                return False

            if not params.is_streaming():
                ''' Fixed time-range jobs need not be reissued '''
                print(
                    "keepalive_fn: backing job won't be restarted because it is not a streaming job",
                    params)
                return False

            # Restart this job again in another lambda invocation.
            # Before doing that, don't keepalive for a while to make it stale. Otherwise the new invocation
            # will assume there is another backing job already running and will auto-exit
            print(
                "keepalive_fn: backing job needs to be restarted. lastaccess_age_ms =",
                lastaccess_age_ms)
            time.sleep(defaults.KEEPALIVE_INTERVAL_SEC *
                       defaults.KEEPALIVE_EXPIRY_MULTIPLE)
            start_backing_job_if_necessary(params, context, keepalive_state,
                                           cache)
            print(
                "keepalive_fn: exiting current backing job after re-issuing a new one"
            )
            return False
    except Exception as e:
        print("keepalive_fn: exception", e, traceback.format_exc())

    # schedule the next iteration of keepalive thread
    scheduler.enter(defaults.KEEPALIVE_INTERVAL_SEC,
                    1,
                    keepalive_fn,
                    argument=(scheduler, params, context, keepalive_state,
                              cache))
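keepalive_fn only re-enters itself on the scheduler, so something has to seed the first iteration and drive the scheduler. A minimal sketch of how the backing job might start the keepalive thread, assuming the same params/context/keepalive_state/cache objects and defaults module used above (the helper name is hypothetical):

import sched
import threading
import time

def start_keepalive_thread(params, context, keepalive_state, cache):
    # Hypothetical bootstrap: seed the first keepalive iteration, then run the
    # scheduler on a daemon thread so keepalive_fn keeps re-scheduling itself.
    scheduler = sched.scheduler(time.time, time.sleep)
    keepalive_fn.num_keepalives = 0  # counter incremented inside keepalive_fn
    scheduler.enter(defaults.KEEPALIVE_INTERVAL_SEC, 1, keepalive_fn,
                    argument=(scheduler, params, context, keepalive_state, cache))
    thread = threading.Thread(target=scheduler.run, daemon=True)
    thread.start()
    return thread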
Example #7
def lambda_context():
    context = LambdaContext()
    context.aws_request_id = 'test_aws_request_id'
    context.function_name = 'emr_create_function_name'
    context.log_group_name = 'log_group_name'
    context.log_stream_name = 'log_stream_name'
    return context
Example #8
def test_health_check():
    response = health_check({}, LambdaContext())
    expected_response = {
        "statusCode": StatusCode.OK.value,
        "headers": {"Access-Control-Allow-Origin": "*"},
        "body": json.dumps(
            {
                "status": "OK",
                "project": settings["project.name"],
                "environment": settings.stela_options.current_environment,
                "datetime": arrow.utcnow().isoformat(),
            }
        ),
    }

    assert response == expected_response
Example #9
        parser.add_argument(
            "--cache_url",
            required=False,
            default="",
            help="memcache configuration url is host:port form")
        parser.add_argument("--daemon", action="store_true")
        args = parser.parse_args()

        test_event = {
            "program": args.program,
            "api_token": args.token,
            "cache_url": args.cache_url,
            "daemon": args.daemon
        }
        if args.endpoint:
            test_event["api_endpoint"] = args.endpoint
        if args.start_time_ms:
            test_event["start_time_ms"] = args.start_time_ms
        if args.end_time_ms:
            test_event["end_time_ms"] = args.end_time_ms
        if args.resolution_hint_ms:
            test_event["resolution_hint_ms"] = args.resolution_hint_ms
        context = LambdaContext()
        context.aws_request_id = utils.millitime()
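        # invoked_function_arn is left unset (None) so downstream code treats this as a CLI run, not a Lambda invocation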
        context.invoked_function_arn = None
        lambda_handler(test_event, context)

    except Exception as e:
        print("Exception", e, traceback.format_exc())
        os._exit(1)
Example #10
def call_app(app):
    lambda_context = LambdaContext()
    return app.handler({}, lambda_context)
Example #11
    "version":"0",
    "id":"9dbdce5f-29f1-1e4a-119e-08ea62e15ce4",
    "detail-type":"RDS DB Instance Event",
    "source":"aws.rds",
    "account":"929976461491",
    "time":"2021-03-10T01:21:29Z",
    "region":"us-east-1",
    "resources":[
        "arn:aws:rds:us-east-1:929976461491:db:database-4"
    ],
    "detail":{
        "EventCategories":[
            "creation"
        ],
        "SourceType":"DB_INSTANCE",
        "SourceArn":"arn:aws:rds:us-east-1:929976461491:db:database-4",
        "Date":"2021-03-10T01:21:29.548Z",
        "Message":"DB instance created",
        "SourceIdentifier":"database-4",
        "EventID":"RDS-EVENT-0005"
    }
}


context = LambdaContext()

response = lambda_handler(event, context)


Example #12
def lambda_ctx() -> LambdaContext:
    lambda_cognito_identity = LambdaCognitoIdentity()
    lambda_cognito_identity.cognito_identity_id = "cognito_identity_id"
    lambda_cognito_identity.cognito_identity_pool_id = "cognito_identity_pool_id"

    lambda_client_context_mobile_client = LambdaClientContextMobileClient()
    lambda_client_context_mobile_client.installation_id = "installation_id"
    lambda_client_context_mobile_client.app_title = "app_title"
    lambda_client_context_mobile_client.app_version_name = "app_version_name"
    lambda_client_context_mobile_client.app_version_code = "app_version_code"
    lambda_client_context_mobile_client.app_package_name = "app_package_name"

    lambda_client_context = LambdaClientContext()
    lambda_client_context.client = lambda_client_context_mobile_client
    lambda_client_context.custom = {"custom": True}
    lambda_client_context.env = {"env": "test"}

    lambda_context = LambdaContext()
    lambda_context.function_name = "function_name"
    lambda_context.function_version = "function_version"
    lambda_context.invoked_function_arn = "invoked_function_arn"
    lambda_context.memory_limit_in_mb = 1234
    lambda_context.aws_request_id = "aws_request_id"
    lambda_context.log_group_name = "log_group_name"
    lambda_context.log_stream_name = "log_stream_name"
    lambda_context.identity = lambda_cognito_identity
    lambda_context.client_context = lambda_client_context

    return lambda_context
Example #13
import time
import unittest
from typing import Any, Dict, List, Optional
from unittest import mock

import boto3
import yaml
from aws_lambda_context import LambdaContext
from moto import mock_dynamodb2

from bibliophile import goodreads
from bibliophile.goodreads.types import Book

from ..read_shelf import handler

dummy_context = LambdaContext()

JsonDict = Dict[str, Any]


class HandlerTest(unittest.TestCase):
    """ Test the lambda function handler which reads from Goodreads shelves. """
    def test_null_body(self):
        """ If there is no POST data at all, we handle that. """
        response = handler({'body': None}, context=dummy_context)
        self.assertEqual(
            response,
            {
                'statusCode': 400,
                'headers': {
                    'Content-Type': 'application/json'