def __init__(self, logger=None, correlation_id=None):
    """Set up the ddb client, correlation id and the list of appointment
    ids targeted for deletion; fall back to a default logger when none
    is injected."""
    self.ddb_client = Dynamodb(stack_name=STACK_NAME)
    self.correlation_id = correlation_id
    self.target_appointment_ids = self.get_appointments_to_be_deleted()
    if logger is None:
        logger = utils.get_logger()
    self.logger = logger
def populate_appointments_table(cls, fast_mode=True):
    """Write all test appointments to the appointments table.

    Args:
        fast_mode: if True, uses ddb batch_writer to quickly populate the
            appointments table but items will not contain created, modified
            and type fields added by Dynamodb.put_item
    """
    if fast_mode:
        batch_client = Dynamodb(stack_name=STACK_NAME)
        table = batch_client.get_table(table_name=app.APPOINTMENTS_TABLE)
        with table.batch_writer() as writer:
            for item in test_data.appointments.values():
                item["id"] = item["appointment_id"]
                writer.put_item(item)
        return
    # Slow path: round-trip each appointment through the domain objects so
    # Dynamodb.put_item adds the created/modified/type fields.
    for item in test_data.appointments.values():
        acuity_appointment = app.AcuityAppointment(item["appointment_id"])
        acuity_appointment.from_dict(item)
        appointment_type = app.AppointmentType()
        appointment_type.from_dict(acuity_appointment.appointment_type)
        acuity_appointment.appointment_type = appointment_type
        try:
            acuity_appointment.ddb_dump()
        except utils.DetailedValueError:
            cls.logger.debug(
                "PutItem failed, which probably "
                "means Appointment table already contains "
                "the required test data; aborting this method",
                extra={},
            )
            break
def __init__(self, interview_task_id, **kwargs):
    """Build an interview task from keyword arguments.

    Optional attributes default to None. Index-key attributes are only set
    when a non-empty value was supplied, because GSI keys cannot be None.
    """
    self._project_task_id = kwargs.get("project_task_id")
    self._interview_task_id = interview_task_id
    for attribute in (
        "name",
        "short_name",
        "description",
        "completion_url",
        "on_demand_available",
        "on_demand_questions_random",
        "live_available",
        "live_questions_random",
        "appointment_type_id",
        "modified",
    ):
        self.__dict__[attribute] = kwargs.get(attribute)
    # these cannot be None (used as index keys)
    for index_attribute in ("on_demand_survey_id", "live_survey_id"):
        if value := kwargs.get(index_attribute):
            self.__dict__[index_attribute] = value
    self._ddb_client = Dynamodb(stack_name=const.STACK_NAME)
def __init__(self, ddb_client=None, acuity_client=None, logger=None, correlation_id=None):
    """Initialise an empty appointment type; clients are created here
    unless already-constructed instances are injected by the caller."""
    self.type_id = None
    self.name = None
    self.category = None
    self.has_link = None
    self.send_notifications = None
    self.templates = None
    # flag used in ddb_load method to check if ddb data was already fetched
    self.modified = None
    self.project_task_id = None
    self.system = None  # Views, MyInterview, etc
    self._correlation_id = correlation_id
    self._logger = utils.get_logger() if logger is None else logger
    self._ddb_client = (
        Dynamodb(stack_name=STACK_NAME) if ddb_client is None else ddb_client
    )
    self._acuity_client = (
        AcuityClient(correlation_id=self._correlation_id)
        if acuity_client is None
        else acuity_client
    )
def __init__(self, qualtrics_account_name="cambridge", survey_id=None, correlation_id=None):
    """Fetch a survey definition from Qualtrics and cache its main parts
    (flow, blocks, questions, last-modified) as attributes."""
    api = SurveyDefinitionsClient(
        qualtrics_account_name=qualtrics_account_name,
        survey_id=survey_id,
        correlation_id=correlation_id,
    )
    response = api.get_survey()
    assert (
        response["meta"]["httpStatus"] == "200 - OK"
    ), f"Call to Qualtrics API failed with response {response}"
    definition = response["result"]
    self.survey_id = survey_id
    self.definition = definition
    self.flow = definition["SurveyFlow"]["Flow"]
    self.blocks = definition["Blocks"]
    self.questions = definition["Questions"]
    self.modified = definition["LastModified"]
    self.ddb_client = Dynamodb(stack_name=const.STACK_NAME)
    self.logger = utils.get_logger()
    self.logger.debug(
        "Initialised SurveyDefinition",
        extra={"__dict__": self.__dict__, "correlation_id": correlation_id},
    )
def __init__(self, appointment_id, logger=None, correlation_id=None):
    """Initialise an appointment shell; Acuity/ddb data is loaded later."""
    self.appointment_id = str(appointment_id)
    self.acuity_info = None
    self.calendar_id = None
    self.calendar_name = None
    self.participant_email = None
    self.participant_user_id = None
    self.appointment_type = AppointmentType()
    # used as GSI sort key, so cannot be None
    self.latest_participant_notification = "0000-00-00 00:00:00+00:00"
    self.appointment_date = None
    self.anon_project_specific_user_id = None
    self.anon_user_task_id = None
    self.appointment_type_id = None
    self._logger = utils.get_logger() if logger is None else logger
    self._correlation_id = correlation_id
    self._ddb_client = Dynamodb(stack_name=STACK_NAME)
    # transactional emails
    self._core_api_client = CoreApiClient(correlation_id=self._correlation_id)
    self._acuity_client = AcuityClient(correlation_id=self._correlation_id)
    # used to store appointment history if rescheduled
    self.original_appointment = None
def __init__(self, response_dict, correlation_id=None):
    """Extract and validate routing fields from a survey response payload.

    Known fields are popped out of response_dict, so the dict kept on the
    instance holds only the remaining response data.

    Raises:
        utils.DetailedValueError: if survey_id or response_id is missing.
    """
    self.survey_id = response_dict.pop("survey_id", None)
    self.response_id = response_dict.pop("response_id", None)
    self.project_task_id = str(
        utils.validate_uuid(response_dict.pop("project_task_id", None))
    )
    self.anon_project_specific_user_id = str(
        utils.validate_uuid(
            response_dict.pop("anon_project_specific_user_id", None)
        )
    )
    self.anon_user_task_id = str(
        utils.validate_uuid(response_dict.pop("anon_user_task_id", None))
    )
    required = (
        ("survey_id", self.survey_id),
        ("response_id", self.response_id),
    )
    for name, value in required:
        if value:
            continue
        raise utils.DetailedValueError(
            f"Required parameter {name} not present in body of call",
            details={
                "response_dict": response_dict,
                "correlation_id": correlation_id,
            },
        )
    self.response_dict = response_dict
    self.ddb_client = Dynamodb(
        stack_name=const.STACK_NAME,
        correlation_id=correlation_id,
    )
    self.correlation_id = correlation_id
def __init__(
    self,
    response_id,
    event_time,
    anon_project_specific_user_id=None,
    anon_user_task_id=None,
    detail_type=None,
    detail=None,
    correlation_id=None,
    account=None,
):
    """Capture a response event, deriving the survey id and the Qualtrics
    response id from the compound response_id."""
    self._response_id = response_id
    self._event_time = event_time
    self.account = account
    self.anon_project_specific_user_id = anon_project_specific_user_id
    self.anon_user_task_id = anon_user_task_id
    self._detail_type = detail_type
    self._detail = detail
    self._correlation_id = correlation_id
    self._core_client = CoreApiClient(correlation_id=correlation_id)
    self._ddb_client = Dynamodb(
        stack_name=const.STACK_NAME, correlation_id=correlation_id
    )
    self.project_task_id = None
    parts = response_id.split("-")
    assert len(parts) == 2, (
        f"response_id ({response_id}) not in expected format "
        "SurveyID-QualtricsResponseID"
    )
    self.survey_id, self.qualtrics_response_id = parts
def __init__(self, logger, correlation_id):
    """Hold shared service clients and table names for calendar operations."""
    self.logger = logger
    self.correlation_id = correlation_id
    # ddb table names
    self.calendars_table = "Calendars"
    self.blocks_table = "CalendarBlocks"
    # service clients
    self.ddb_client = Dynamodb(stack_name=STACK_NAME)
    self.acuity_client = AcuityClient()
    self.sns_client = SnsClient()
def persist_thiscovery_event(event, context):
    """Store the incoming event in the audit table.

    Non-serialisable helper keys injected upstream ("logger",
    "correlation_id") are stripped before persisting. Returns an HTTP 200
    lambda-style response.
    """
    for transient_key in ("logger", "correlation_id"):
        event.pop(transient_key, None)
    ddb_client = Dynamodb(stack_name=const.STACK_NAME, correlation_id=event["id"])
    audit_table = ddb_client.get_table(table_name=const.AUDIT_TABLE)
    put_response = audit_table.put_item(Item=event)
    assert put_response["ResponseMetadata"]["HTTPStatusCode"] == HTTPStatus.OK
    return {"statusCode": HTTPStatus.OK, "body": json.dumps("")}
def setUpClass(cls):
    """Empty the auth0 events table and set up shared clients before the
    tests in this class run."""
    super().setUpClass()
    cls.ddb_client = Dynamodb(stack_name=const.STACK_NAME)
    wipe_kwargs = dict(
        table_name=const.AUTH0_EVENTS_TABLE_NAME,
        key_name=const.AUTH0_EVENTS_TABLE_HASH,
        sort_key_name=const.AUTH0_EVENTS_TABLE_SORT,
    )
    cls.ddb_client.delete_all(**wipe_kwargs)
    cls.eb_client = EventbridgeClient()
def ddb_load_interview_questions(survey_id):
    """Query all interview questions stored for the given survey id."""
    client = Dynamodb(stack_name=const.STACK_NAME)
    query_kwargs = {
        "table_name": const.INTERVIEW_QUESTIONS_TABLE["name"],
        "KeyConditionExpression": "survey_id = :survey_id",
        "ExpressionAttributeValues": {":survey_id": survey_id},
    }
    return client.query(**query_kwargs)
def __init__(self, csvfile_path, consent_questions_tags, consent_info_url):
    """Extend the base emails manager with consent-specific configuration."""
    super().__init__(csvfile_path)
    self.consent_question_tags = consent_questions_tags
    self.consent_info_url = consent_info_url
    self.consent_statements = {}
    self.ddb_client = Dynamodb(stack_name=const.STACK_NAME)
    # column names looked up in the input csv file
    self.anon_user_task_id_column_name = "anon_user_task_id"
    self.anon_project_specific_user_id_column_name = "anon_project_specific_user_id"
    self.consent_datetime_column_name = "StartDate"
    self.sent_emails = []
def setup():
    """Reset the auth0 events table and load the metrics test fixtures."""
    client = Dynamodb(stack_name=const.STACK_NAME)
    client.delete_all(
        table_name=const.AUTH0_EVENTS_TABLE_NAME,
        key_name=const.AUTH0_EVENTS_TABLE_HASH,
        sort_key_name=const.AUTH0_EVENTS_TABLE_SORT,
    )
    client.batch_put_items(
        const.AUTH0_EVENTS_TABLE_NAME,
        td.METRICS_TEST_DATA,
        const.AUTH0_EVENTS_TABLE_HASH,
    )
def __init__(
    self,
    account: str,
    survey_id: str,
    contact_list_id: str,
    project_task_id: str,
    correlation_id: str = None,
):
    """Bind a Qualtrics distributions client to one account/survey pair."""
    self.account = account
    self.survey_id = survey_id
    self.account_survey_id = f"{account}_{survey_id}"
    self.contact_list_id = contact_list_id
    self.project_task_id = project_task_id
    self.correlation_id = correlation_id
    self.dist_client = qualtrics.DistributionsClient(
        qualtrics_account_name=account
    )
    self.ddb_client = Dynamodb(
        stack_name=const.STACK_NAME, correlation_id=correlation_id
    )
def add_template_to_ddb(template_id, template_name, template_type, formatted_custom_properties, preview_url):
    """Store a hubspot email template definition in the
    HubspotEmailTemplates table, keyed by template name."""
    template_item = {
        "bcc": [],
        "cc": [],
        "contact_properties": [],
        "custom_properties": formatted_custom_properties,
        "from": TRANSACTIONAL_EMAILS_FROM_ADDRESS,
        "hs_template_id": template_id,
    }
    Dynamodb().put_item(
        table_name="HubspotEmailTemplates",
        key=template_name,
        item_type=template_type,
        item_details={"preview_url": preview_url},
        item=template_item,
    )
def __init__(
    self,
    survey_id,
    anon_project_specific_user_id,
    account,
    project_task_id,
    correlation_id=None,
):
    """Record survey/user coordinates and open a ddb client; a fresh
    correlation id is generated when none is supplied."""
    self.survey_id = survey_id
    self.anon_project_specific_user_id = anon_project_specific_user_id
    self.account = account
    self.account_survey_id = f"{account}_{survey_id}"
    self.project_task_id = project_task_id
    self.correlation_id = (
        utils.new_correlation_id() if correlation_id is None else correlation_id
    )
    self.ddb_client = Dynamodb(
        stack_name=const.STACK_NAME, correlation_id=self.correlation_id
    )
def get_unassigned_links(account_survey_id: str, ddb_client=None) -> list[dict]:
    """
    Retrieves existing personal links that have not yet been assigned to an user
    """
    if ddb_client is None:
        ddb_client = Dynamodb(stack_name=const.STACK_NAME)
    query_kwargs = {
        "table_name": const.PersonalLinksTable.NAME,
        "IndexName": "unassigned-links",
        "KeyConditionExpression": (
            "account_survey_id = :account_survey_id "
            "AND #status = :link_status"
        ),
        "ExpressionAttributeValues": {
            ":account_survey_id": account_survey_id,
            ":link_status": "new",
        },
        # "#status" alias needed because status is a reserved word in ddb
        "ExpressionAttributeNames": {"#status": "status"},
    }
    return ddb_client.query(**query_kwargs)
def clear_notification_queue(event, context):
    """Delete processed, non-transactional notifications older than seven days.

    Returns the list of deleted notifications. If any delete call does not
    come back with HTTP 200, the progress made so far is logged and an
    exception is raised.
    """
    logger = event["logger"]
    correlation_id = event["correlation_id"]
    seven_days_ago = now_with_tz() - timedelta(days=7)
    processed_notifications = c_notif.get_notifications_to_clear(
        datetime_threshold=seven_days_ago, stack_name=const.STACK_NAME
    )
    notifications_to_delete = [
        n
        for n in processed_notifications
        if parser.isoparse(n["modified"]) < seven_days_ago
        and n[NotificationAttributes.TYPE.value]
        != NotificationType.TRANSACTIONAL_EMAIL.value
    ]
    deleted_notifications = []
    ddb_client = Dynamodb(stack_name=const.STACK_NAME)
    for notification in notifications_to_delete:
        response = ddb_client.delete_item(
            c_notif.NOTIFICATION_TABLE_NAME,
            notification["id"],
            correlation_id=correlation_id,
        )
        if response["ResponseMetadata"]["HTTPStatusCode"] == http.HTTPStatus.OK:
            deleted_notifications.append(notification)
            continue
        # Record the progress made so far before surfacing the failure
        logger.info(
            f"Notifications deleted before an error occurred",
            extra={
                "deleted_notifications": deleted_notifications,
                "correlation_id": correlation_id,
            },
        )
        logger.error(
            "Failed to delete notification",
            extra={"notification": notification, "response": response},
        )
        raise Exception(
            f"Failed to delete notification {notification}; received response: {response}"
        )
    return deleted_notifications
def __init__(self, **kwargs):
    """Prepare a survey-initialisation job for one user.

    Expected kwargs:
        survey_id (str): destination Qualtrics survey id (mandatory)
        response_id (str): destination response id (mandatory)
        anon_project_specific_user_id (str): mandatory
        account (str): Qualtrics account name; defaults to "cambridge"
        correlation_id (str): optional tracing id

    Raises:
        AssertionError: if a mandatory kwarg is missing
        utils.ObjectDoesNotExistError: if no initialisation config exists
            for the destination survey
    """
    self._logger = utils.get_logger()
    self._correlation_id = kwargs.get("correlation_id")
    self.destination_survey_id = kwargs.get("survey_id")
    self.destination_response_id = kwargs.get("response_id")
    err_message = "Call to initialise_survey missing mandatory data: {}"
    assert self.destination_survey_id, err_message.format("survey_id")
    # Bug fix: this assert previously re-checked destination_survey_id, so
    # a missing response_id was never caught here.
    assert self.destination_response_id, err_message.format("response_id")
    self.survey_init_config = SurveyInitConfig(
        destination_survey_id=self.destination_survey_id,
        correlation_id=self._correlation_id,
    )
    self.survey_init_config.get()
    try:
        self.survey_init_config.details
    except AttributeError:
        raise utils.ObjectDoesNotExistError(
            f"Initialisation config not found for survey {self.destination_survey_id}",
            details={},
        )
    self.destination_account = kwargs.get("account", "cambridge")
    self.anon_project_specific_user_id = kwargs.get(
        "anon_project_specific_user_id")
    assert self.anon_project_specific_user_id, err_message.format(
        "anon_project_specific_user_id")
    self._core_client = CoreApiClient(correlation_id=self._correlation_id)
    user = self._core_client.get_user_by_anon_project_specific_user_id(
        self.anon_project_specific_user_id)
    self.user_id = user["id"]
    self.ddb_client = Dynamodb(stack_name=const.STACK_NAME,
                               correlation_id=self._correlation_id)
    self.cached_responses = dict()
    self.missing_responses = list()
    self.target_embedded_data = dict()
    self.responses_client = ResponsesClient(
        survey_id=self.destination_survey_id,
        qualtrics_account_name=self.destination_account,
        correlation_id=self._correlation_id,
    )
def get_appointments_by_type(type_ids, correlation_id=None):
    """
    Args:
        type_ids (list): Appointment type ids to query ddb
        correlation_id:

    Returns:
        List of appointments matching any of the input type ids
    """
    ddb_client = Dynamodb(stack_name=STACK_NAME, correlation_id=correlation_id)
    appointments = []
    for type_id in type_ids:
        appointments.extend(
            ddb_client.query(
                table_name=APPOINTMENTS_TABLE,
                IndexName="project-appointments-index",
                KeyConditionExpression="appointment_type_id = :type_id",
                ExpressionAttributeValues={":type_id": type_id},
            )
        )
    return appointments
def __init__(self, consent_id=None, core_api_client=None, correlation_id=None):
    """Initialise an empty consent record; a fresh consent_id is generated
    when none is supplied."""
    self.project_id = None
    self.project_short_name = None
    self.project_name = None
    self.project_task_id = None
    self.consent_id = str(uuid.uuid4()) if consent_id is None else consent_id
    self.consent_datetime = None
    self.anon_project_specific_user_id = None
    self.anon_user_task_id = None
    self.consent_statements = None
    # flag used in ddb_load method to check if ddb data was already fetched
    self.modified = None
    self._correlation_id = correlation_id
    self._ddb_client = Dynamodb(stack_name=STACK_NAME)
    self._core_api_client = (
        CoreApiClient(correlation_id=correlation_id)
        if core_api_client is None
        else core_api_client
    )
def test_deletion_of_questions_no_longer_in_survey_definition(self):
    """put_interview_questions should delete stored questions that are
    absent from the latest survey definition."""
    stale_question = {
        "block_id": "BL_3qH1dnbq50y9V0a",
        "block_name": "Your experience using thiscovery",
        "question_description": "<p>Take a moment to reflect on how much you like bread before answering this question.</p>",
        "question_id": "QID12",
        "question_name": "Q12",
        "question_text": "<h3>Is thiscovery the best invention since sliced bread?</h3>",
        "sequence_no": "12",
        "survey_id": "SV_eDrjXPqGElN0Mwm",
        "survey_modified": "2021-02-16T15:59:12Z",
    }
    # plant a question that the incoming survey definition no longer contains
    Dynamodb(stack_name=const.STACK_NAME).put_item(
        table_name=const.INTERVIEW_QUESTIONS_TABLE["name"],
        key=stale_question["survey_id"],
        key_name=const.INTERVIEW_QUESTIONS_TABLE["partition_key"],
        sort_key={
            const.INTERVIEW_QUESTIONS_TABLE["sort_key"]: stale_question["question_id"]
        },
        item_type="interview_question",
        item_details=None,
        item=stale_question,
        update_allowed=True,
    )
    test_event = copy.deepcopy(td.TEST_INTERVIEW_QUESTIONS_UPDATED_EB_EVENT)
    result = ep.put_interview_questions(test_event, None)
    self.assertEqual(HTTPStatus.OK, result["statusCode"])
    updated_question_ids, deleted_question_ids = json.loads(result["body"])
    self.assertEqual(4, len(updated_question_ids))
    self.assertEqual([stale_question["question_id"]], deleted_question_ids)
def main():
    """Interactively copy one Acuity calendar into the Calendars ddb table.

    Prompts for a calendar name, looks it up in Acuity and, if found,
    writes a Calendars item with default notification settings; exits
    silently on empty input and raises if the calendar does not exist.
    """
    calendar_name = input(
        "Please input the name of the calendar you want to add to Dynamodb, as shown in Acuity's UI:"
    )
    if not calendar_name:
        sys.exit()
    acuity_client = AcuityClient()
    ddb_client = Dynamodb(stack_name=STACK_NAME)
    acuity_calendars = acuity_client.get_calendars()
    pprint(acuity_calendars)
    target_calendar = None
    for candidate in acuity_calendars:
        if candidate["name"] == calendar_name:
            target_calendar = candidate
    if not target_calendar:
        raise utils.ObjectDoesNotExistError(
            f'Calendar "{calendar_name}" not found in Acuity')
    response = ddb_client.put_item(
        "Calendars",
        target_calendar["id"],
        item_type="acuity-calendar",
        item_details=target_calendar,
        item={
            "label": target_calendar["name"],
            "block_monday_morning": True,
            "emails_to_notify": [],
            "myinterview_link": None,
        },
    )
    assert (
        response["ResponseMetadata"]["HTTPStatusCode"] == HTTPStatus.OK
    ), f"Dynamodb client put_item operation failed with response: {response}"
    print(f'Calendar "{calendar_name}" successfully added to Dynamodb table')
def __init__(self, appointment, logger=None, ddb_client=None, correlation_id=None):
    """
    Args:
        appointment: instance of BaseAcuityAppointment or subclasses
        logger: optional logger; a default one is created when omitted
        ddb_client: optional Dynamodb client; created when omitted
        correlation_id:
    """
    self.appointment = appointment
    self.project_id = None
    self.project_name = None
    self.anon_project_specific_user_id = None
    self.interviewer_calendar_ddb_item = None
    self.correlation_id = correlation_id
    self.logger = utils.get_logger() if logger is None else logger
    self.ddb_client = (
        Dynamodb(stack_name=STACK_NAME) if ddb_client is None else ddb_client
    )
def get_forward_to_address(received_for, correlation_id=None):
    """Resolve the forwarding address for an incoming email address.

    Lookup order in the ForwardingMap table: the full received_for address,
    then its subdomain, then the "thiscovery.org" catch-all rule. Returns
    None when no rule matches.

    Notes:
        This function can probably be optimised by making a call to the scan
        method of ddb_client and then parsing the results, rather than making
        up to three separate calls to get_item
    """
    ddb_client = Dynamodb(stack_name=STACK_NAME)

    def candidate_keys():
        # generator keeps the split('@') lazy, exactly as in the original
        # sequential lookups
        yield received_for  # full received_for email address
        yield received_for.split('@')[1]  # subdomain
        yield "thiscovery.org"  # domain catch-all rule

    for key in candidate_keys():
        ddb_item = ddb_client.get_item(
            table_name='ForwardingMap',
            key=key,
            correlation_id=correlation_id,
        )
        if ddb_item is not None:
            return ddb_item['forward-to']
def clear_appointments_table(cls):
    """Delete every item in the appointments table, lazily creating the
    shared ddb client on first use.

    The previous version caught AttributeError around the whole delete_all
    call to detect a missing cls.ddb_client, which would also have masked
    any AttributeError raised inside delete_all itself (and retried it);
    testing for the attribute explicitly keeps the error handling narrow.
    """
    if getattr(cls, "ddb_client", None) is None:
        cls.ddb_client = Dynamodb(stack_name=STACK_NAME)
    cls.ddb_client.delete_all(table_name=app.APPOINTMENTS_TABLE)
def main(output_filename='test_data_draft.py', items_n=100, depth=None):
    """Dump a random, anonymised sample of auth0 event items to a file.

    Args:
        output_filename: path of the python file to write
        items_n: sample size drawn from the scanned table
        depth: pprint nesting depth limit (None for unlimited)
    """
    ddb_client = Dynamodb(stack_name=const.STACK_NAME)
    all_items = ddb_client.scan(table_name=const.AUTH0_EVENTS_TABLE_NAME)
    sample = anonymise_data(random.sample(all_items, items_n))
    with open(output_filename, 'w') as output_file:
        pprint(sample, stream=output_file, depth=depth)
def setUpClass(cls):
    # Run the shared base-class setup, then attach a Dynamodb client
    # (constructed with default arguments) for the tests in this class.
    super().setUpClass()
    cls.ddb = Dynamodb()
def setUpClass(cls):
    # Seed DynamoDB with fixtures for the survey-initialisation tests:
    # two init configs plus one cached task response per source survey
    # (accounts "cambridge" and "thisinstitute").
    ddb_client = Dynamodb(stack_name=const.STACK_NAME)
    survey_init_table = const.SurveyInitTable()
    # setup test data
    # A full Qualtrics-style task response used as the base fixture.
    test_task_response = {
        "survey_id": "SV_cD6pZ6NoZSpnL8i",
        "participant_responses": {
            "Q2": 2,
            "Q3": "Quality Assurance Engineer",
            "Q4": ["1", "3"],
            "Q2_DO": ["1", "2", "3"],
            "RecipientEmail": None,
            "RecipientLastName": None,
            "DistributionChannel": "preview",
            "Q4_DO": ["1", "2", "3"],
            "Set a Value Now": None,
            "RecipientFirstName": None,
            "LocationLatitude": "53.3793029785",
            "StartDate": "2022-01-04T14:05:59Z",
            "ExternalReference": None,
            "Status": 1,
            "Finished": 1,
            "Progress": 100,
            "Q3_DO": None,
            "Duration (in seconds)": 39,
            "anon_project_specific_user_id": "29aca87c-e0f9-44c2-b97e-22cbe842a908",
            "EndDate": "2022-01-04T14:06:38Z",
            "RecordedDate": "2022-01-04T14:06:39.384Z",
            "UserLanguage": "EN-GB",
            "LocationLongitude": "-1.46020507813",
            "IPAddress": None,
        },
        "anon_project_specific_user_id": "29aca87c-e0f9-44c2-b97e-22cbe842a908",
        "anon_user_task_id": "fb92e1f0-e756-47db-ab5c-232c3618999a",
        "project_task_id": "8e0fb129-f6b6-4b6b-a01a-cfdb14f8fec8",
        "qualtrics_response_id": "R_1Howgi70DgOqcKG",
        "user_id": "1cbe9aad-b29f-46b5-920e-b4c496d42515",
        "account": "cambridge",
    }
    # Init config mapping destination survey SV_aWDwvBuOqAsxrkq's fields
    # to questions in source survey SV_cD6pZ6NoZSpnL8i.
    survey_init_table.put_item(
        **{
            "destination_survey_id": "SV_aWDwvBuOqAsxrkq",
            "item_details": {
                "job": [{"question": "Q3", "survey": "SV_cD6pZ6NoZSpnL8i"}],
                "age": [{"question": "Q2", "survey": "SV_cD6pZ6NoZSpnL8i"}],
                "food": [{"question": "Q4", "survey": "SV_cD6pZ6NoZSpnL8i"}],
            },
        },
        update=True,
    )
    # Cached response for the first source survey.
    ddb_client.put_item(
        table_name=const.TASK_RESPONSES_TABLE["name"],
        key="SV_cD6pZ6NoZSpnL8i-R_1Howgi70DgOqcKG",
        key_name=const.TASK_RESPONSES_TABLE["partition_key"],
        item_type="survey_response",
        update_allowed=True,
        item_details={},
        sort_key={
            const.TASK_RESPONSES_TABLE["sort_key"]: "2022-01-04T14:06:39Z"
        },
        item=test_task_response,
    )
    # Second init config, pointing at source survey SV_0kUfBfy4Im8nMKq.
    survey_init_table.put_item(
        **{
            "destination_survey_id": "SV_ezK42q9nZRtRCxU",
            "item_details": {
                "job": [{"question": "Q3", "survey": "SV_0kUfBfy4Im8nMKq"}],
                "age": [{"question": "Q2", "survey": "SV_0kUfBfy4Im8nMKq"}],
                "food": [{"question": "Q4", "survey": "SV_0kUfBfy4Im8nMKq"}],
            },
        },
        update=True,
    )
    # Cached response for the second source survey: the base fixture with
    # survey/response ids and account overridden.
    ddb_client.put_item(
        table_name=const.TASK_RESPONSES_TABLE["name"],
        key="SV_0kUfBfy4Im8nMKq-R_KAd1R9N6PQNQ1SL",
        key_name=const.TASK_RESPONSES_TABLE["partition_key"],
        item_type="survey_response",
        update_allowed=True,
        item_details={},
        sort_key={
            const.TASK_RESPONSES_TABLE["sort_key"]: "2022-01-04T14:06:39Z"
        },
        item={
            **test_task_response,
            "survey_id": "SV_0kUfBfy4Im8nMKq",
            "qualtrics_response_id": "R_KAd1R9N6PQNQ1SL",
            "account": "thisinstitute",
        },
    )