def __init__(self, survey_consent_event):
    self.logger = survey_consent_event.get("logger", utils.get_logger())
    self.correlation_id = survey_consent_event.get(
        "correlation_id", utils.new_correlation_id()
    )
    self.consent_dict = json.loads(survey_consent_event["body"])
    self.consent_info_url = self.consent_dict["consent_info_url"]
    del self.consent_dict["consent_info_url"]
    consent_embedded_data_fieldname = "consent_statements"
    self.consent_dict[consent_embedded_data_fieldname] = json.loads(
        self.consent_dict[consent_embedded_data_fieldname]
    )
    self.to_recipient_email = self.consent_dict.get("to_recipient_email")
    try:
        self.template_name = self.consent_dict["template_name"]
    except KeyError:
        self.template_name = DEFAULT_CONSENT_EMAIL_TEMPLATE
    else:
        del self.consent_dict["template_name"]
    try:
        self.consent_dict["consent_datetime"] = qualtrics2thiscovery_timestamp(
            self.consent_dict["consent_datetime"]
        )
    except KeyError:
        self.consent_dict["consent_datetime"] = str(utils.now_with_tz())
    self.core_api_client = CoreApiClient(correlation_id=self.correlation_id)
    self.consent = Consent(
        core_api_client=self.core_api_client,
        correlation_id=self.correlation_id,
    )
    self.consent.from_dict(consent_dict=self.consent_dict)
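# A minimal sketch of the kind of survey_consent_event the constructor above
# expects. The field names are taken from the parsing logic above; the values
# (and the payload as a whole) are illustrative placeholders, not real data.
import json

example_survey_consent_event = {
    "correlation_id": "4a95a8b6-0000-0000-0000-000000000000",  # optional
    "body": json.dumps(
        {
            "consent_info_url": "https://example.org/consent-info",
            "consent_statements": json.dumps(
                ["I agree to take part in this interview", "I agree to being recorded"]
            ),  # note: a JSON-encoded string, decoded again by the constructor
            "to_recipient_email": "participant@example.org",  # optional
            "template_name": "interview_consent_template",  # optional; defaults to DEFAULT_CONSENT_EMAIL_TEMPLATE
            "consent_datetime": "2020-06-01 12:00:00",  # optional; format expected by qualtrics2thiscovery_timestamp (assumed)
        }
    ),
}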
def set_user_task_completed(ut_id, correlation_id=None):
    utils.validate_uuid(ut_id)

    # check that user_task exists
    result = get_user_task(ut_id, correlation_id)
    if len(result) == 0:
        errorjson = {
            "user_task_id": ut_id,
            "correlation_id": str(correlation_id),
        }
        raise utils.ObjectDoesNotExistError("user task does not exist", errorjson)

    if result[0]["project_task_status"] not in ["planned", "testing"]:
        updated_rows_count = execute_non_query(
            sql_q.UPDATE_USER_TASK_STATUS,
            (
                "complete",
                str(utils.now_with_tz()),
                str(ut_id),
            ),
            correlation_id,
        )
        assert (
            updated_rows_count == 1
        ), f"Failed to update status of user task {ut_id}; updated_rows_count: {updated_rows_count}"
def send_reminder(self, now=None):
    self._abort_notification_check(
        event_type="reminder"
    )  # fetches latest info from Acuity
    if now is None:
        now = utils.now_with_tz()
    self.appointment_date_check(now)
    return self._notify_participant(event_type="reminder")
def create_user_task_(self, anon_project_specific_user_id, status="complete"):
    ut_dict = {
        "user_id": apsuid_2_user_id(anon_project_specific_user_id),
        "project_task_id": self.project_task_id,
        "status": status,
        "consented": str(utils.now_with_tz()),
    }
    ut = UserTask()
    ut.create_user_task(ut_dict=ut_dict)
def patch_user_api(event, context):
    logger = event["logger"]
    correlation_id = event["correlation_id"]

    # get info supplied to api call
    user_id = event["pathParameters"]["id"]
    try:
        user_jsonpatch = JsonPatch.from_string(event["body"])
    except InvalidJsonPatch:
        raise utils.DetailedValueError(
            "invalid jsonpatch",
            details={
                "traceback": traceback.format_exc(),
                "correlation_id": correlation_id,
            },
        )

    for p in user_jsonpatch:
        # convert email to lowercase
        if p.get("path") == "/email":
            p["value"] = p["value"].lower()

        # strip leading and trailing spaces
        try:
            p["value"] = p["value"].strip()
        except KeyError:
            raise utils.DetailedValueError(
                "invalid jsonpatch",
                details={
                    "traceback": traceback.format_exc(),
                    "correlation_id": correlation_id,
                },
            )

    logger.info(
        "API call",
        extra={
            "user_id": user_id,
            "user_jsonpatch": user_jsonpatch,
            "correlation_id": correlation_id,
            "event": event,
        },
    )

    modified_time = utils.now_with_tz()

    # create an audit record of update, inc 'undo' patch
    entity_update = create_user_entity_update(
        user_id, user_jsonpatch, modified_time, correlation_id
    )

    patch_user(user_id, user_jsonpatch, modified_time, correlation_id)

    # on successful update save audit record
    entity_update.save()

    return {"statusCode": HTTPStatus.NO_CONTENT, "body": json.dumps("")}
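# A standalone sketch (not part of the handler) of the kind of JSON Patch body
# the endpoint above accepts, using the same jsonpatch library the handler
# relies on. The patch document and its values are placeholders.
import json
from jsonpatch import JsonPatch

example_body = json.dumps(
    [
        {"op": "replace", "path": "/email", "value": "  Jane.Doe@Example.org "},
        {"op": "replace", "path": "/first_name", "value": "Jane"},
    ]
)
example_patch = JsonPatch.from_string(example_body)  # the handler catches InvalidJsonPatch raised here
# patch_user_api then lowercases and strips the /email value before applying the patch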
def update_latest_participant_notification(self):
    self.latest_participant_notification = str(utils.now_with_tz())
    result = self._ddb_client.update_item(
        table_name=APPOINTMENTS_TABLE,
        key=self.appointment_id,
        name_value_pairs={
            "latest_participant_notification": self.latest_participant_notification
        },
    )
    assert (
        result["ResponseMetadata"]["HTTPStatusCode"] == HTTPStatus.OK
    ), f"Call to ddb client update_item method failed with response {result}"
    return result["ResponseMetadata"]["HTTPStatusCode"]
def redirect_to_user_interview_task(event, context):
    """
    Updates user task url in response to user_interview_task events
    posted by Qualtrics
    """
    detail_type = event["detail-type"]
    assert (
        detail_type == "user_interview_task"
    ), f"Unexpected detail-type: {detail_type}"
    event_detail = event["detail"]
    correlation_id = event["id"]
    try:
        anon_user_task_id = utils.validate_uuid(
            event_detail.pop("anon_user_task_id")
        )
    except KeyError as exc:
        raise utils.DetailedValueError(
            f"Mandatory {exc} data not found in source event",
            details={
                "event": event,
            },
        )
    ssm_client = SsmClient()
    vcs_param = ssm_client.get_parameter(name="video-call-system")
    ut_base_url = vcs_param["base-url"]
    user_task_url = f"{ut_base_url}?response_id={event_detail['response_id']}"
    ut_id = anon_user_task_id_2_user_task_id(
        anon_user_task_id, correlation_id=correlation_id
    )
    updated_rows_count = execute_non_query(
        sql_q.UPDATE_USER_TASK_URL,
        (
            user_task_url,
            str(utils.now_with_tz()),
            str(ut_id),
        ),
        correlation_id,
    )
    assert (
        updated_rows_count == 1
    ), f"Failed to update url of user task {ut_id}; updated_rows_count: {updated_rows_count}"
    body = {
        "user_task_id": ut_id,
        "user_task_url": user_task_url,
    }
    return {"statusCode": HTTPStatus.OK, "body": json.dumps(body)}
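# Sketch of the EventBridge-style event the handler above expects; the field
# names match the lookups above, while the ids and response id are placeholders.
example_user_interview_task_event = {
    "id": "9b1f3c2e-0000-0000-0000-000000000000",  # used as the correlation id
    "detail-type": "user_interview_task",
    "detail": {
        "anon_user_task_id": "615ff0e6-0000-0000-0000-000000000000",
        "response_id": "R_example",
    },
}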
def get_appointments_to_be_deleted(self, now=None):
    """
    Queries ddb for appointments booked for 60 days ago
    """
    if now is None:
        now = utils.now_with_tz()
    date_format = "%Y-%m-%d"
    sixty_days_ago = now - datetime.timedelta(days=60)
    sixty_days_ago_string = sixty_days_ago.strftime(date_format)
    result = self.ddb_client.query(
        table_name=APPOINTMENTS_TABLE,
        IndexName="reminders-index",
        KeyConditionExpression="appointment_date = :date",
        ExpressionAttributeValues={
            ":date": sixty_days_ago_string,
        },
    )
    return [x["id"] for x in result]
def check_appointment_in_the_past(appointment_instance, appointment_datetime=None):
    if appointment_datetime is None:
        appointment_datetime = parser.parse(
            appointment_instance.acuity_info["datetime"]
        )
    else:
        appointment_datetime = parser.parse(appointment_datetime)
    two_hours_ago = utils.now_with_tz() - datetime.timedelta(hours=2)
    if appointment_datetime < two_hours_ago:
        appointment_instance._logger.info(
            "Notification aborted; appointment is in the past",
            extra={
                "appointment": appointment_instance.as_dict(),
                "correlation_id": appointment_instance._correlation_id,
            },
        )
        return True
    else:
        return False
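# Standalone illustration of the two-hour grace window applied above; the
# datetimes are made up and nothing here touches an Appointment instance.
import datetime
from dateutil import parser

_now = parser.parse("2020-06-01 15:00+00:00")
_two_hours_ago = _now - datetime.timedelta(hours=2)
assert parser.parse("2020-06-01 12:30+00:00") < _two_hours_ago  # treated as in the past
assert not parser.parse("2020-06-01 14:00+00:00") < _two_hours_ago  # still notifiable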
def get_appointments_to_be_reminded(self, now=None):
    if now is None:
        now = utils.now_with_tz()
    date_format = "%Y-%m-%d"
    tomorrow = now + datetime.timedelta(days=1)
    today_string = now.strftime(date_format)
    tomorrow_string = tomorrow.strftime(date_format)
    return self.ddb_client.query(
        table_name=APPOINTMENTS_TABLE,
        IndexName="reminders-index",
        KeyConditionExpression="appointment_date = :date "
        "AND latest_participant_notification "
        "BETWEEN :t1 AND :t2",
        ExpressionAttributeValues={
            ":date": tomorrow_string,
            ":t1": "2020-00-00",  # excludes only appointments still set to the "0000-00-00" placeholder, because those have not yet received the initial booking notification
            ":t2": today_string,
        },
    )
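# The BETWEEN condition above works because DynamoDB compares these string
# attributes lexicographically; a plain-Python illustration with made-up
# values (today is assumed to be 2020-06-01):
_today_string = "2020-06-01"
_examples = {
    "booked earlier, not yet reminded": "2020-05-30 10:15:00+00:00",
    "never notified (placeholder)": "0000-00-00",
    "already reminded today": "2020-06-01 09:00:00+00:00",
}
_to_remind = [k for k, v in _examples.items() if "2020-00-00" <= v <= _today_string]
assert _to_remind == ["booked earlier, not yet reminded"]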
def common_routine(
    self,
    test_now,
    expected_appointment_id,
    expected_notifications,
    appointment_class,
    expected_result=None,
    expected_notifications_number=1,
):
    if expected_result is None:
        expected_result = [(HTTPStatus.NO_CONTENT, expected_appointment_id)]
    result = self.reminders_attempt(test_now=test_now)
    now = utils.now_with_tz()
    self.assertEqual(expected_result, result)

    # check notification
    notifications = self.ddb_client.scan(
        table_name=self.notifications_table,
        table_name_verbatim=True,
    )
    self.assertEqual(expected_notifications_number, len(notifications))
    attributes_to_ignore = [
        "created",
        "id",
        "modified",
        "processing_error_message",
        "processing_fail_count",
        "processing_status",
    ]
    if expected_notifications_number:
        for n in notifications:
            for a in attributes_to_ignore:
                del n[a]
        self.assertCountEqual(expected_notifications, notifications)

    # check appointment latest notification updated in ddb
    appointment = appointment_class(appointment_id=expected_appointment_id)
    appointment.ddb_load()
    latest_notification_datetime = parser.parse(
        appointment.latest_participant_notification
    )
    difference = abs(now - latest_notification_datetime)
    self.assertLess(difference.seconds, 20)
def clear_notification_queue(event, context):
    logger = event["logger"]
    correlation_id = event["correlation_id"]
    seven_days_ago = now_with_tz() - timedelta(days=7)
    # processed_notifications = get_notifications('processing_status', ['processed'])
    processed_notifications = c_notif.get_notifications_to_clear(
        datetime_threshold=seven_days_ago, stack_name=const.STACK_NAME
    )
    notifications_to_delete = [
        x
        for x in processed_notifications
        if (parser.isoparse(x["modified"]) < seven_days_ago)
        and (
            x[NotificationAttributes.TYPE.value]
            != NotificationType.TRANSACTIONAL_EMAIL.value
        )
    ]
    deleted_notifications = list()
    ddb_client = Dynamodb(stack_name=const.STACK_NAME)
    for n in notifications_to_delete:
        response = ddb_client.delete_item(
            c_notif.NOTIFICATION_TABLE_NAME, n["id"], correlation_id=correlation_id
        )
        if response["ResponseMetadata"]["HTTPStatusCode"] == http.HTTPStatus.OK:
            deleted_notifications.append(n)
        else:
            logger.info(
                "Notifications deleted before an error occurred",
                extra={
                    "deleted_notifications": deleted_notifications,
                    "correlation_id": correlation_id,
                },
            )
            logger.error(
                "Failed to delete notification",
                extra={"notification": n, "response": response},
            )
            raise Exception(
                f"Failed to delete notification {n}; received response: {response}"
            )
    return deleted_notifications
def _create_user_task_process_optional_data(self, ut_dict):
    optional_fields_name_default_and_validator = [
        ("id", str(uuid.uuid4()), utils.validate_uuid),
        ("anon_user_task_id", str(uuid.uuid4()), utils.validate_uuid),
        ("created", str(utils.now_with_tz()), utils.validate_utc_datetime),
        ("status", DEFAULT_STATUS, self._validate_status),
    ]
    for (
        variable_name,
        default_value,
        validating_func,
    ) in optional_fields_name_default_and_validator:
        if variable_name in ut_dict:
            try:
                self.__dict__[variable_name] = validating_func(
                    ut_dict[variable_name]
                )  # https://stackoverflow.com/a/4687672
            except utils.DetailedValueError as err:
                err.add_correlation_id(self._correlation_id)
                raise err
        else:
            self.__dict__[variable_name] = default_value
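# The self.__dict__[variable_name] assignments above are equivalent to
# setattr(self, variable_name, value); a minimal standalone illustration:
class _Example:
    pass


_obj = _Example()
_obj.__dict__["status"] = "active"
assert _obj.status == "active"
assert getattr(_obj, "status") == "active"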
def patch_user(
    id_to_update,
    patch_json,
    modified_time=None,
    correlation_id=None,
):
    """
    Args:
        id_to_update:
        patch_json:
        modified_time:
        correlation_id:

    Returns:
        Total number of rows updated in RDS database
    """
    # default argument values are evaluated once at import time, so compute
    # call-time defaults here rather than in the signature
    if modified_time is None:
        modified_time = utils.now_with_tz()
    if correlation_id is None:
        correlation_id = new_correlation_id()

    mappings = {
        "email": {"table_name": "public.projects_user", "column_name": "email"},
        "email_address_verified": {
            "table_name": "public.projects_user",
            "column_name": "email_address_verified",
        },
        "title": {"table_name": "public.projects_user", "column_name": "title"},
        "first_name": {
            "table_name": "public.projects_user",
            "column_name": "first_name",
        },
        "last_name": {
            "table_name": "public.projects_user",
            "column_name": "last_name",
        },
        "auth0_id": {"table_name": "public.projects_user", "column_name": "auth0_id"},
        "status": {"table_name": "public.projects_user", "column_name": "status"},
        "country_code": {
            "table_name": "public.projects_user",
            "column_name": "country_code",
        },
        "crm_id": {"table_name": "public.projects_user", "column_name": "crm_id"},
    }

    id_column = "id"

    return execute_jsonpatch(
        id_column, id_to_update, mappings, patch_json, modified_time, correlation_id
    )
def time_x_hours_ago(x_hours):
    x_hours_ago = utils.now_with_tz() - datetime.timedelta(hours=x_hours)
    return x_hours_ago.strftime("%Y-%m-%d %H:%M:%S.%f")
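# Example usage (illustrative): the returned value is a timestamp string such
# as "2020-06-01 13:00:00.000000"; note there is no timezone offset in the
# output because the format string above has no %z directive.
# cutoff = time_x_hours_ago(24)  # timestamp string for 24 hours before now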
def create_user_external_account(uea_json, correlation_id):
    # json MUST contain: external_system_id, user_id, external_user_id
    # json may OPTIONALLY include: id, created, status

    # extract mandatory data from json
    try:
        external_system_id = utils.validate_uuid(uea_json["external_system_id"])
        user_id = utils.validate_uuid(uea_json["user_id"])
        external_user_id = uea_json["external_user_id"]
    except utils.DetailedValueError as err:
        err.add_correlation_id(correlation_id)
        raise err
    except KeyError as err:
        errorjson = {
            "parameter": err.args[0],
            "correlation_id": str(correlation_id),
        }
        raise utils.DetailedValueError("mandatory data missing", errorjson) from err

    # now process optional json data
    if "id" in uea_json:
        try:
            id = utils.validate_uuid(uea_json["id"])
        except utils.DetailedValueError as err:
            err.add_correlation_id(correlation_id)
            raise err
    else:
        id = str(uuid.uuid4())

    if "created" in uea_json:
        try:
            created = utils.validate_utc_datetime(uea_json["created"])
        except utils.DetailedValueError as err:
            err.add_correlation_id(correlation_id)
            raise err
    else:
        created = str(utils.now_with_tz())

    if "status" in uea_json:
        try:
            status = validate_status(uea_json["status"])
        except utils.DetailedValueError as err:
            err.add_correlation_id(correlation_id)
            raise err
    else:
        status = DEFAULT_STATUS

    # check external account does not already exist
    existing = check_user_id_and_external_account(
        user_id, external_system_id, correlation_id
    )
    if len(existing) > 0:
        errorjson = {
            "user_id": user_id,
            "external_system_id": external_system_id,
            "correlation_id": str(correlation_id),
        }
        raise utils.DuplicateInsertError(
            "user_external_account already exists", errorjson
        )

    # lookup user id (needed for insert) for user uuid (supplied in json)
    existing_user = get_user_by_id(user_id, correlation_id)
    if len(existing_user) == 0:
        errorjson = {"user_id": user_id, "correlation_id": str(correlation_id)}
        raise utils.ObjectDoesNotExistError("user does not exist", errorjson)

    execute_non_query(
        CREATE_USER_EXTERNAL_ACCOUNT_SQL,
        (id, created, created, external_system_id, user_id, external_user_id, status),
        correlation_id,
    )

    new_user_external_account = {
        "id": id,
        "created": created,
        "modified": created,
        "external_system_id": external_system_id,
        "user_id": user_id,
        "external_user_id": external_user_id,
        "status": status,
    }

    return new_user_external_account
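# A sketch of the minimal payload accepted by create_user_external_account;
# the field names follow the comments above, and the ids are placeholders.
example_uea_json = {
    "external_system_id": "a33c3b47-0000-0000-0000-000000000000",
    "user_id": "35224bd5-0000-0000-0000-000000000000",
    "external_user_id": "ext-0001",
    # optional: "id", "created", "status" (defaults generated above if omitted)
}
# new_record = create_user_external_account(example_uea_json, correlation_id)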
def create_user(user_json, correlation_id):
    # json MUST contain: email, first_name, last_name, status, country_code
    # json may OPTIONALLY include: id, title, created, auth0_id
    # note that users will always be created with email_address_verified = false

    # extract mandatory data from json
    try:
        email = user_json["email"].lower().strip()
        first_name = user_json["first_name"].strip()
        last_name = user_json["last_name"].strip()
        status = validate_status(user_json["status"])
        country_code = user_json["country_code"].strip()
        country_utils.get_country_name(
            country_code
        )  # looking up the name is a way of validating the code - an invalid code will raise an error
    except utils.DetailedValueError as err:
        err.add_correlation_id(correlation_id)
        raise err

    # now process optional json data
    if "id" in user_json:
        try:
            id = utils.validate_uuid(user_json["id"])
        except utils.DetailedValueError as err:
            err.add_correlation_id(correlation_id)
            raise err
    else:
        id = str(uuid.uuid4())

    if "created" in user_json:
        try:
            created = utils.validate_utc_datetime(user_json["created"])
        except utils.DetailedValueError as err:
            err.add_correlation_id(correlation_id)
            raise err
    else:
        created = str(utils.now_with_tz())

    if "auth0_id" in user_json:
        auth0_id = user_json["auth0_id"]
    else:
        auth0_id = None

    if "title" in user_json:
        title = user_json["title"]
    else:
        title = None

    existing_user = get_user_by_id(id, correlation_id)
    if len(existing_user) > 0:
        errorjson = {"id": id, "correlation_id": str(correlation_id)}
        raise utils.DuplicateInsertError("user already exists", errorjson)

    params = (
        id,
        created,
        created,
        email,
        title,
        first_name,
        last_name,
        country_code,
        auth0_id,
        status,
    )
    execute_non_query(sql_q.CREATE_USER_SQL, params, correlation_id)

    new_user = {
        "id": id,
        "created": created,
        "modified": created,
        "email": email,
        "title": title,
        "first_name": first_name,
        "last_name": last_name,
        "auth0_id": auth0_id,
        "crm_id": None,
        "country_code": country_code,
        "status": status,
    }

    new_user = append_calculated_properties(new_user)

    return new_user
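# A sketch of the minimal payload accepted by create_user; the mandatory fields
# follow the comments above, and all values are placeholders.
example_user_json = {
    "email": "jane.doe@example.org",
    "first_name": "Jane",
    "last_name": "Doe",
    "status": "new",  # placeholder; must pass validate_status
    "country_code": "GB",
    # optional: "id", "title", "created", "auth0_id"
}
# new_user = create_user(example_user_json, correlation_id)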
def create_user_project(up_json, correlation_id, do_nothing_if_exists=False):
    """
    Inserts new UserProject row in thiscovery db

    Args:
        up_json: must contain user_id and project_id; may optionally include
            id, created, status, anon_project_specific_user_id
        correlation_id:
        do_nothing_if_exists:

    Returns:
    """
    # extract mandatory data from json
    try:
        user_id = utils.validate_uuid(up_json["user_id"])  # all public id are uuids
        project_id = utils.validate_uuid(up_json["project_id"])
    except utils.DetailedValueError as err:
        err.add_correlation_id(correlation_id)
        raise err
    except KeyError as err:
        errorjson = {
            "parameter": err.args[0],
            "correlation_id": str(correlation_id),
        }
        raise utils.DetailedValueError("mandatory data missing", errorjson) from err

    # now process optional json data
    optional_fields_name_default_and_validator = [
        ("anon_project_specific_user_id", str(uuid.uuid4()), utils.validate_uuid),
        ("created", str(utils.now_with_tz()), utils.validate_utc_datetime),
        ("status", DEFAULT_STATUS, validate_status),
    ]
    for (
        variable_name,
        default_value,
        validating_func,
    ) in optional_fields_name_default_and_validator:
        if variable_name in up_json:
            try:
                globals()[variable_name] = validating_func(
                    up_json[variable_name]
                )  # https://stackoverflow.com/a/4687672
            except utils.DetailedValueError as err:
                err.add_correlation_id(correlation_id)
                raise err
        else:
            globals()[variable_name] = default_value

    # id shadows a builtin function, so treat it separately (using the globals()
    # approach above would overwrite that function)
    if "id" in up_json:
        try:
            id = utils.validate_uuid(up_json["id"])
        except utils.DetailedValueError as err:
            err.add_correlation_id(correlation_id)
            raise err
    else:
        id = str(uuid.uuid4())

    # check user_project does not already exist
    existing = get_existing_user_project_id(user_id, project_id, correlation_id)
    if len(existing) > 0:
        if do_nothing_if_exists:
            return existing[0]
        else:
            errorjson = {
                "user_id": user_id,
                "project_id": project_id,
                "correlation_id": str(correlation_id),
            }
            raise utils.DuplicateInsertError("user_project already exists", errorjson)

    # lookup user id (needed for insert) for user uuid (supplied in json)
    result = get_user_by_id(user_id, correlation_id)
    if len(result) == 0:
        errorjson = {"user_id": user_id, "correlation_id": str(correlation_id)}
        raise utils.ObjectDoesNotExistError("user does not exist", errorjson)

    execute_non_query(
        CREATE_USER_PROJECT_SQL,
        (
            id,
            created,
            created,
            user_id,
            project_id,
            status,
            anon_project_specific_user_id,
        ),
        correlation_id,
    )

    new_user_project = {
        "id": id,
        "created": created,
        "modified": created,
        "user_id": user_id,
        "project_id": project_id,
        "status": status,
        "anon_project_specific_user_id": anon_project_specific_user_id,
    }

    return new_user_project
import uuid

import thiscovery_lib.utilities as utils  # assumed import path for the shared utils module used below
from src.interview_tasks import InterviewTask


def main(task_dict, task_id=None):
    if task_id is None:
        task_id = str(uuid.uuid4())
    project_task_id = task_dict["project_task_id"]
    interview_task = InterviewTask(project_task_id, task_id)
    interview_task.from_dict(item_dict=task_dict)
    interview_task.ddb_dump()
    return task_id


if __name__ == "__main__":
    interview_task_dict = {
        "project_task_id": "TBC",
        "name": "TBC",
        "short_name": "TBC",
        "description": "TBC",
        "completion_url": "TBC",
        "on_demand_available": False,
        "on_demand_survey_id": "TBC",
        "live_available": True,
        "live_survey_id": "TBC",
        "appointment_type_id": "TBC",
        "modified": utils.now_with_tz(),
    }
    new_task_id = main(task_dict=interview_task_dict)
    print(f"ID of new interview task: {new_task_id}")