def test_check_fired_webhook_event(self, hge_ctx, scheduled_triggers_evts_webhook):
    """Create a cron trigger that fires every minute and verify that the
    first delivered event carries the configured path, headers and payload."""
    create_trigger = {
        "type": "create_cron_trigger",
        "args": {
            "name": "test_cron_trigger",
            "webhook": "{{SCHEDULED_TRIGGERS_WEBHOOK_DOMAIN}}" + "/test",
            "schedule": "* * * * *",
            "headers": [{"name": "header-key", "value": "header-value"}],
            "payload": {"foo": "baz"},
            "include_in_metadata": False,
        },
    }
    status, body = hge_ctx.v1q(create_trigger)
    assert status == 200, body
    # The maximum timeout is set to 75s because, the cron timestamps
    # that are generated will start from the next minute: suppose
    # the cron schedule is "* * * * *" and the trigger is created at
    # 10:00:00, then the next event is scheduled at 10:01:00, but the
    # events processor will not process it exactly at the zeroeth second
    # of 10:01. The only guarantee is that the processor starts before
    # 10:01:10 (see sleep in processScheduledTriggers). So in the worst
    # case it takes 70 seconds to process the first scheduled event.
    fired = scheduled_triggers_evts_webhook.get_event(75)
    validate_event_webhook(fired['path'], '/test')
    validate_event_headers(fired['headers'], {"header-key": "header-value"})
    assert fired['body']['payload'] == {"foo": "baz"}
    assert fired['body']['name'] == 'test_cron_trigger'
def test_check_fired_webhook_events(self, hge_ctx, scheduled_triggers_evts_webhook):
    """Collect the three scheduled-event deliveries (two failures, one
    success), cross-check the successful one against the created_at stored
    in hdb_catalog, and verify the webhook queue is drained."""
    # Collect the three generated events (they may arrive out of order):
    e1 = scheduled_triggers_evts_webhook.get_event(12)  # at least 10 sec, see processScheduledTriggers.sleep
    e2 = scheduled_triggers_evts_webhook.get_event(12)
    e3 = scheduled_triggers_evts_webhook.get_event(12)
    # '/fail' sorts before '/test', so the two failures come first:
    [event_fail1, event_fail2, event_success] = sorted([e1, e2, e3], key=lambda e: e['path'])
    # Check the two failures:
    validate_event_webhook(event_fail1['path'], '/fail')
    validate_event_webhook(event_fail2['path'], '/fail')
    # Check the one successful webhook call:
    query = {
        "type": "run_sql",
        "args": {
            "sql": '''
            select timezone('utc',created_at) as created_at
            from hdb_catalog.hdb_scheduled_events
            where comment = 'test scheduled event';
            '''
        }
    }
    st, resp = hge_ctx.v1q(query)
    assert st == 200, resp
    # resp['result'][0] is the header row; [1][0] is the created_at value.
    db_created_at = resp['result'][1][0]
    validate_event_webhook(event_success['path'], '/test')
    validate_event_headers(event_success['headers'], {"header-key": "header-value"})
    assert event_success['body']['payload'] == self.webhook_payload
    # run_sql returns "YYYY-MM-DD HH:MM:SS…"; the webhook body uses
    # ISO-8601 with a "T" separator and a "Z" suffix.
    assert event_success['body']['created_at'] == db_created_at.replace(" ", "T") + "Z"
    # FIX: membership-test the dict directly instead of the non-idiomatic
    # unbound-method call dict.keys(event_success['body']).
    for k in ["scheduled_time", "created_at", "id"]:  # additional keys
        assert k in event_success['body']
    assert scheduled_triggers_evts_webhook.is_queue_empty()
def test_check_fired_webhook_event(self, hge_ctx, scheduled_triggers_evts_webhook):
    """Verify a single fired event: path, headers, whole-body payload, and
    that no further events are pending afterwards."""
    fired = scheduled_triggers_evts_webhook.get_event(65)
    validate_event_webhook(fired['path'], '/test')
    validate_event_headers(fired['headers'], {"header-key": "header-value"})
    # Here the webhook body is expected to be exactly the configured payload.
    assert fired['body'] == self.webhook_payload
    assert scheduled_triggers_evts_webhook.is_queue_empty()
def test_check_fired_webhook_event(self, hge_ctx, scheduled_triggers_evts_webhook):
    """Verify a single fired event: path, headers, payload field, the
    presence of the standard metadata keys, and that the queue is empty."""
    event = scheduled_triggers_evts_webhook.get_event(65)
    validate_event_webhook(event['path'], '/test')
    validate_event_headers(event['headers'], {"header-key": "header-value"})
    assert event['body']['payload'] == self.webhook_payload
    # FIX: membership-test the dict directly instead of the non-idiomatic
    # unbound-method call dict.keys(event['body']).
    for k in ["scheduled_time", "created_at", "id"]:  # additional keys
        assert k in event['body']
    assert scheduled_triggers_evts_webhook.is_queue_empty()
def test_check_fired_webhook_event(self, hge_ctx, scheduled_triggers_evts_webhook):
    """Verify a single fired event and cross-check its created_at field
    against the value stored in hdb_catalog.hdb_scheduled_events."""
    query = {
        "type": "run_sql",
        "args": {
            "sql": '''
            select timezone('utc',created_at) as created_at
            from hdb_catalog.hdb_scheduled_events
            where comment = 'test scheduled event';
            '''
        }
    }
    st, resp = hge_ctx.v1q(query)
    assert st == 200, resp
    # resp['result'][0] is the header row; [1][0] is the created_at value.
    db_created_at = resp['result'][1][0]
    event = scheduled_triggers_evts_webhook.get_event(65)
    validate_event_webhook(event['path'], '/test')
    validate_event_headers(event['headers'], {"header-key": "header-value"})
    assert event['body']['payload'] == self.webhook_payload
    # run_sql returns "YYYY-MM-DD HH:MM:SS…"; the webhook body uses
    # ISO-8601 with a "T" separator and a "Z" suffix.
    assert event['body']['created_at'] == db_created_at.replace(" ", "T") + "Z"
    # FIX: membership-test the dict directly instead of the non-idiomatic
    # unbound-method call dict.keys(event['body']).
    for k in ["scheduled_time", "created_at", "id"]:  # additional keys
        assert k in event['body']
    assert scheduled_triggers_evts_webhook.is_queue_empty()
def test_scheduled_events(self, hge_ctx, scheduled_triggers_evts_webhook):
    """Create three one-off scheduled events — one deliverable, one already
    past its tolerance window ('dead'), and one whose endpoint fails — then
    verify the webhook deliveries and the final statuses/tries recorded in
    hdb_catalog.hdb_scheduled_events."""
    query = {
        "type": "bulk",
        "args": [
            # Succeeds
            {
                "type": "create_scheduled_event",
                "args": {
                    "webhook": '{{SCHEDULED_TRIGGERS_WEBHOOK_DOMAIN}}/test',
                    "schedule_at": stringify_datetime(datetime.utcnow()),
                    "payload": self.webhook_payload,
                    "headers": self.header_conf,
                    "comment": "test scheduled event"
                }
            },
            # Fails immediately, with 'dead' (2020 timestamp is far past
            # the default tolerance window, so it is never attempted)
            {
                "type": "create_scheduled_event",
                "args": {
                    "webhook": "{{SCHEDULED_TRIGGERS_WEBHOOK_DOMAIN}}/",
                    "schedule_at": "2020-01-01T00:00:00Z",
                    "payload": self.webhook_payload,
                    "headers": self.header_conf
                }
            },
            # Fails on request, trying twice (num_retries + 1 attempts):
            {
                "type": "create_scheduled_event",
                "args": {
                    "webhook": self.webhook_domain + '/fail',
                    "schedule_at": stringify_datetime(datetime.utcnow()),
                    "payload": self.webhook_payload,
                    "headers": self.header_conf,
                    "retry_conf": {
                        "num_retries": 1,
                        "retry_interval_seconds": 1,
                        "timeout_seconds": 1,
                        "tolerance_seconds": 21600
                    }
                }
            }
        ]
    }
    st, resp = hge_ctx.v1q(query)
    assert st == 200, resp
    assert len(resp) == 3, resp
    # Ensure a valid event_id is returned for all requests.
    # FIX: pass a generator to all() instead of materializing a list (C419).
    assert all('event_id' in r for r in resp), resp

    # Here we check the three requests received by the webhook.
    # Collect the three generated events (they may arrive out of order):
    e1 = scheduled_triggers_evts_webhook.get_event(12)  # at least 10 sec, see processScheduledTriggers.sleep
    e2 = scheduled_triggers_evts_webhook.get_event(12)
    e3 = scheduled_triggers_evts_webhook.get_event(12)
    # '/fail' sorts before '/test', so the two failures come first:
    [event_fail1, event_fail2, event_success] = sorted([e1, e2, e3], key=lambda e: e['path'])
    # Check the two failures:
    validate_event_webhook(event_fail1['path'], '/fail')
    validate_event_webhook(event_fail2['path'], '/fail')
    # Check the one successful webhook call:
    query = {
        "type": "run_sql",
        "args": {
            "sql": '''
            select timezone('utc',created_at) as created_at
            from hdb_catalog.hdb_scheduled_events
            where comment = 'test scheduled event';
            '''
        }
    }
    st, resp = hge_ctx.v1q(query)
    assert st == 200, resp
    # resp['result'][0] is the header row; [1][0] is the created_at value.
    db_created_at = resp['result'][1][0]
    validate_event_webhook(event_success['path'], '/test')
    validate_event_headers(event_success['headers'], {"header-key": "header-value"})
    assert event_success['body']['payload'] == self.webhook_payload
    # run_sql returns "YYYY-MM-DD HH:MM:SS…"; the webhook body uses
    # ISO-8601 with a "T" separator and a "Z" suffix.
    assert event_success['body']['created_at'] == db_created_at.replace(" ", "T") + "Z"
    # FIX: membership-test the dict directly instead of the non-idiomatic
    # unbound-method call dict.keys(event_success['body']).
    for k in ["scheduled_time", "created_at", "id"]:  # additional keys
        assert k in event_success['body']
    assert scheduled_triggers_evts_webhook.is_queue_empty()

    def try_check_events_statuses():
        # Retried via until_asserts_pass below, since status rows are
        # written asynchronously by the events processor.
        query = {
            "type": "run_sql",
            "args": {
                "sql": "select status,tries from hdb_catalog.hdb_scheduled_events order by status desc"
            }
        }
        st, resp = hge_ctx.v1q(query)
        assert st == 200, resp
        scheduled_event_statuses = resp['result']
        # 3 scheduled events have been created:
        # one should be dead because the timestamp was past the tolerance limit,
        # one should be delivered because all the parameters were reasonable,
        # one should be error because the webhook returns an error state.
        assert scheduled_event_statuses == [
            ['status', 'tries'],
            ['error', '2'],  # num_retries + 1
            ['delivered', '1'],
            ['dead', '0']
        ], resp

    until_asserts_pass(100, try_check_events_statuses)