Example 1
def clear_notification_queue(event, context):
    logger = event["logger"]
    correlation_id = event["correlation_id"]
    seven_days_ago = now_with_tz() - timedelta(days=7)
    # processed_notifications = get_notifications('processing_status', ['processed'])
    processed_notifications = c_notif.get_notifications_to_clear(
        datetime_threshold=seven_days_ago, stack_name=const.STACK_NAME)
    notifications_to_delete = [
        x for x in processed_notifications
        if (parser.isoparse(x["modified"]) < seven_days_ago) and (
            x[NotificationAttributes.TYPE.value] !=
            NotificationType.TRANSACTIONAL_EMAIL.value)
    ]
    deleted_notifications = list()
    ddb_client = Dynamodb(stack_name=const.STACK_NAME)
    for n in notifications_to_delete:
        response = ddb_client.delete_item(c_notif.NOTIFICATION_TABLE_NAME,
                                          n["id"],
                                          correlation_id=correlation_id)
        if response["ResponseMetadata"][
                "HTTPStatusCode"] == http.HTTPStatus.OK:
            deleted_notifications.append(n)
        else:
            logger.info(
                f"Notifications deleted before an error occurred",
                extra={
                    "deleted_notifications": deleted_notifications,
                    "correlation_id": correlation_id,
                },
            )
            logger.error(
                "Failed to delete notification",
                extra={
                    "notification": n,
                    "response": response
                },
            )
            raise Exception(
                f"Failed to delete notification {n}; received response: {response}"
            )
    return deleted_notifications
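
A minimal invocation sketch for the handler above, assuming the event is normally assembled by the Lambda runtime or a scheduling wrapper; the standard-library logger and the generated correlation id below are illustrative stand-ins:

import logging
import uuid

# Hypothetical local invocation; in production the event and context are
# supplied by the Lambda runtime (or a scheduling decorator).
event = {
    "logger": logging.getLogger("clear_notification_queue"),
    "correlation_id": str(uuid.uuid4()),
}
deleted = clear_notification_queue(event, context=None)
print(f"Deleted {len(deleted)} processed notifications older than seven days")
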
class AppointmentsCleaner:
    def __init__(self, logger=None, correlation_id=None):
        self.logger = logger
        if logger is None:
            self.logger = utils.get_logger()
        self.correlation_id = correlation_id
        self.ddb_client = Dynamodb(stack_name=STACK_NAME)
        self.target_appointment_ids = self.get_appointments_to_be_deleted()

    def get_appointments_to_be_deleted(self, now=None):
        """
        Queries ddb for appointments whose appointment_date is 60 days before now
        """
        if now is None:
            now = utils.now_with_tz()
        date_format = "%Y-%m-%d"
        sixty_days_ago = now - datetime.timedelta(days=60)
        sixty_days_ago_string = sixty_days_ago.strftime(date_format)
        result = self.ddb_client.query(
            table_name=APPOINTMENTS_TABLE,
            IndexName="reminders-index",
            KeyConditionExpression="appointment_date = :date",
            ExpressionAttributeValues={
                ":date": sixty_days_ago_string,
            },
        )
        return [x["id"] for x in result]

    def delete_old_appointments(self):
        results = list()
        for app_id in self.target_appointment_ids:
            result = self.ddb_client.delete_item(
                table_name=APPOINTMENTS_TABLE,
                key=app_id,
            )
            results.append(result["ResponseMetadata"]["HTTPStatusCode"])
        return results
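
A short usage sketch, assuming the class is driven from a scheduled handler much like the notification cleaner above; the handler name and event shape are illustrative, not taken from the source:

def delete_old_appointments_handler(event, context):
    # Illustrative handler: remove appointments whose date was 60 days ago.
    cleaner = AppointmentsCleaner(
        logger=event.get("logger"),
        correlation_id=event.get("correlation_id"),
    )
    status_codes = cleaner.delete_old_appointments()
    cleaner.logger.info(
        "Deleted old appointments",
        extra={
            "appointment_ids": cleaner.target_appointment_ids,
            "status_codes": status_codes,
        },
    )
    return status_codes
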
class CalendarBlocker:
    def __init__(self, logger, correlation_id):
        self.logger = logger
        self.correlation_id = correlation_id
        self.calendars_table = "Calendars"
        self.blocks_table = "CalendarBlocks"
        self.ddb_client = Dynamodb(stack_name=STACK_NAME)
        self.acuity_client = AcuityClient()
        self.sns_client = SnsClient()

    def notify_sns_topic(self, message, subject):
        topic_arn = utils.get_secret(
            "sns-topics")["interview-notifications-arn"]
        self.sns_client.publish(
            message=message,
            topic_arn=topic_arn,
            Subject=subject,
        )

    def get_target_calendar_ids(self):
        calendars = self.ddb_client.scan(
            self.calendars_table,
            "block_monday_morning",
            [True],
        )
        return [(x["id"], x["label"]) for x in calendars]

    def block_upcoming_weekend(self, calendar_id):
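        # The block covers the upcoming weekend plus Monday morning:
        # Saturday 00:00 until Monday 12:00.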
        next_monday_date = next_weekday(0)
        block_end = datetime.datetime.combine(next_monday_date,
                                              datetime.time(hour=12, minute=0))
        saturday_before_next_monday = next_monday_date - datetime.timedelta(
            days=2)
        block_start = datetime.datetime.combine(
            saturday_before_next_monday, datetime.time(hour=0, minute=0))
        return self.acuity_client.post_block(calendar_id, block_start,
                                             block_end)

    def create_blocks(self):
        calendars = self.get_target_calendar_ids()
        self.logger.debug("Calendars to block", extra={"calendars": calendars})
        created_blocks_ids = list()
        affected_calendar_names = list()
        for i, name in calendars:
            try:
                block_dict = self.block_upcoming_weekend(i)
                created_blocks_ids.append(block_dict["id"])
                affected_calendar_names.append(name)
                response = self.ddb_client.put_item(
                    self.blocks_table,
                    block_dict["id"],
                    item_type="calendar-block",
                    item_details=block_dict,
                    item={
                        "status": "new",
                        "error_message": None,
                    },
                    correlation_id=self.correlation_id,
                )
                assert (
                    response["ResponseMetadata"]["HTTPStatusCode"] ==
                    HTTPStatus.OK
                ), f"Call to Dynamodb client put_item method failed with response: {response}. "
            except Exception as err:
                self.logger.error(
                    f"{repr(err)} {len(created_blocks_ids)} blocks were created before this error occurred. "
                    f"Created blocks ids: {created_blocks_ids}")
                raise

        return created_blocks_ids, affected_calendar_names

    def mark_failed_block_deletion(self, item_key, exception):
        error_message = f"This error happened when trying to delete Acuity calendar block {item_key}: {repr(exception)}"
        self.logger.error(error_message)
        self.ddb_client.update_item(
            self.blocks_table,
            item_key,
            name_value_pairs={
                "status": "error",
                "error_message": error_message
            },
        )

    def delete_blocks(self):
        blocks = self.ddb_client.scan(
            self.blocks_table,
            filter_attr_name="status",
            filter_attr_values=["new"],
        )
        deleted_blocks_ids = list()
        affected_calendar_names = list()
        for b in blocks:
            item_key = b.get("id")
            try:
                delete_response = self.acuity_client.delete_block(item_key)
                assert delete_response == HTTPStatus.NO_CONTENT, (
                    f"Call to Acuity client delete_block method failed with response: {delete_response}. "
                    f"{len(deleted_blocks_ids)} blocks were deleted before this error occurred. Deleted blocks ids: {deleted_blocks_ids}"
                )
                deleted_blocks_ids.append(item_key)
                affected_calendar_names.append(
                    self.acuity_client.get_calendar_by_id(
                        b["details"]["calendarID"])["name"])
                response = self.ddb_client.delete_item(
                    self.blocks_table,
                    item_key,
                    correlation_id=self.correlation_id)
                assert (
                    response["ResponseMetadata"]["HTTPStatusCode"] ==
                    HTTPStatus.OK
                ), (f"Call to Dynamodb client delete_item method failed with response: {response}. "
                    f"{len(deleted_blocks_ids)} blocks were deleted before this error occurred. Deleted blocks ids: {deleted_blocks_ids}"
                    )
            except Exception as err:
                self.mark_failed_block_deletion(item_key, err)
                continue

        return deleted_blocks_ids, affected_calendar_names
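
A sketch of how the two entry points might be scheduled, assuming one job runs before the weekend to create the blocks and another runs on Monday to remove them; the handler names and the SNS message are illustrative:

def block_calendars_handler(event, context):
    # Illustrative pre-weekend job: block Monday mornings on flagged calendars.
    blocker = CalendarBlocker(event["logger"], event["correlation_id"])
    created_ids, calendar_names = blocker.create_blocks()
    blocker.notify_sns_topic(
        message=f"Created blocks {created_ids} on calendars {calendar_names}",
        subject="Calendar blocks created",
    )
    return created_ids


def unblock_calendars_handler(event, context):
    # Illustrative Monday job: delete the Acuity blocks recorded with status "new".
    blocker = CalendarBlocker(event["logger"], event["correlation_id"])
    deleted_ids, _ = blocker.delete_blocks()
    return deleted_ids
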
Example 4
class SurveyDefinition:
    def __init__(self,
                 qualtrics_account_name="cambridge",
                 survey_id=None,
                 correlation_id=None):
        client = SurveyDefinitionsClient(
            qualtrics_account_name=qualtrics_account_name,
            survey_id=survey_id,
            correlation_id=correlation_id,
        )
        response = client.get_survey()
        assert (response["meta"]["httpStatus"] == "200 - OK"
                ), f"Call to Qualtrics API failed with response {response}"
        self.survey_id = survey_id
        self.definition = response["result"]
        self.flow = self.definition["SurveyFlow"]["Flow"]
        self.blocks = self.definition["Blocks"]
        self.questions = self.definition["Questions"]
        self.modified = self.definition["LastModified"]
        self.ddb_client = Dynamodb(stack_name=const.STACK_NAME)
        self.logger = utils.get_logger()
        self.logger.debug(
            "Initialised SurveyDefinition",
            extra={
                "__dict__": self.__dict__,
                "correlation_id": correlation_id,
            },
        )

    @classmethod
    def from_eb_event(cls, event):
        logger = utils.get_logger()
        logger.debug(
            "EB event",
            extra={
                "event": event,
            },
        )
        event_detail = event["detail"]
        try:
            qualtrics_account_name = event_detail.pop("account")
            survey_id = event_detail.pop("survey_id")
        except KeyError as exc:
            raise utils.DetailedValueError(
                f"Mandatory {exc} data not found in source event",
                details={
                    "event": event,
                },
            )
        return cls(
            qualtrics_account_name=qualtrics_account_name,
            survey_id=survey_id,
            correlation_id=event["id"],
        )

    def get_interview_question_list_from_Qualtrics(self):
        def parse_question_html(s):
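            # PROMPT_RE is expected to capture the mandatory question prompt;
            # DESCRIPTION_RE captures an optional description, which may be absent.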
            text_m = PROMPT_RE.search(s)
            text = text_m.group(1)

            description_m = DESCRIPTION_RE.search(s)
            try:
                description = description_m.group(1)
            except AttributeError:
                description = None
            return text, description

        interview_question_list = list()
        question_counter = 1
        block_ids_flow = [
            x["ID"] for x in self.flow if x["Type"] not in ["Branch"]
        ]  # flow items of Branch type represent survey branching logic
        for block_id in block_ids_flow:
            block = self.blocks[block_id]
            block_name = block["Description"]
            question_ids = [x["QuestionID"] for x in block["BlockElements"]]
            for question_id in question_ids:
                q = self.questions[question_id]
                question_name = q["DataExportTag"]
                question_text_raw = q["QuestionText"]
                try:
                    question_text, question_description = parse_question_html(
                        question_text_raw)
                except AttributeError:  # no match found for PROMPT_RE
                    if SYSTEM_RE.findall(question_text_raw):
                        continue  # this is a system config question; skip
                    else:
                        raise utils.DetailedValueError(
                            "Mandatory prompt div could not be found in interview question",
                            details={
                                "question": question_text_raw,
                                "survey_id": self.survey_id,
                                "question_id": question_id,
                                "question_export_tag": question_name,
                            },
                        )
                question = InterviewQuestion(
                    survey_id=self.survey_id,
                    survey_modified=self.modified,
                    question_id=question_id,
                    question_name=question_name,
                    sequence_no=str(question_counter),
                    block_name=block_name,
                    block_id=block_id,
                    question_text=question_text,
                    question_description=question_description,
                )
                interview_question_list.append(question)
                question_counter += 1
        return interview_question_list

    def ddb_update_interview_questions(self):
        """
        Updates the list of interview questions held in Dynamodb for a particular survey.
        This includes not only adding and updating questions, but also deleting questions that are no longer
        present in the survey.
        """
        ddb_question_list = self.ddb_load_interview_questions(self.survey_id)
        survey_question_list = self.get_interview_question_list_from_Qualtrics(
        )
        updated_question_ids = list()
        deleted_question_ids = list()
        for q in survey_question_list:
            self.ddb_client.put_item(
                table_name=const.INTERVIEW_QUESTIONS_TABLE["name"],
                key=q._survey_id,
                key_name=const.INTERVIEW_QUESTIONS_TABLE["partition_key"],
                item_type="interview_question",
                item_details=None,
                item=q.as_dict(),
                update_allowed=True,
                sort_key={
                    const.INTERVIEW_QUESTIONS_TABLE["sort_key"]:
                    q._question_id,
                },
            )
            updated_question_ids.append(q._question_id)

        for q in ddb_question_list:
            question_id = q["question_id"]
            if question_id not in updated_question_ids:
                self.ddb_client.delete_item(
                    table_name=const.INTERVIEW_QUESTIONS_TABLE["name"],
                    key=q["survey_id"],
                    key_name=const.INTERVIEW_QUESTIONS_TABLE["partition_key"],
                    sort_key={
                        const.INTERVIEW_QUESTIONS_TABLE["sort_key"]:
                        question_id,
                    },
                )
                deleted_question_ids.append(question_id)

        return updated_question_ids, deleted_question_ids

    @staticmethod
    def ddb_load_interview_questions(survey_id):
        ddb_client = Dynamodb(stack_name=const.STACK_NAME)
        return ddb_client.query(
            table_name=const.INTERVIEW_QUESTIONS_TABLE["name"],
            KeyConditionExpression="survey_id = :survey_id",
            ExpressionAttributeValues={
                ":survey_id": survey_id,
            },
        )

    @staticmethod
    def get_interview_questions(survey_id):
        """
        Get interview questions for a VIEWS interview. Randomises question order
        within blocks if that is specified by InterviewTask.

        Args:
            survey_id: Qualtrics survey id of survey containing the interview questions

        Returns:
            Dict with keys survey_id, modified, blocks and count
        """
        def randomise_questions_check():
            """
            Reads configuration in InterviewTask items containing survey_id to determine whether or not
            interview questions should be randomised.

            Returns:
                True if questions should be randomised; otherwise False
            """
            bool_config_choices = {True, False}
            error_message = (
                f"Conflicting interview task configuration(s) found for survey {survey_id}. "
                f"Random question order specified in some but not all interview tasks using that survey"
            )
            interview_tasks_table = const.InterviewTasksTable()

            # get config for live interviews with questions coming from survey_id
            live_config_matches = interview_tasks_table.query_live_survey_id_index(
                survey_id=survey_id)
            live_random_config_set = set()
            for m in live_config_matches:
                random_config = m.get("live_questions_random", False)
                live_random_config_set.add(random_config)

            # get config for on-demand interviews with questions coming from survey_id
            on_demand_config_matches = (
                interview_tasks_table.query_on_demand_survey_id_index(
                    survey_id=survey_id))
            on_demand_random_config_set = set()
            for m in on_demand_config_matches:
                random_config = m.get("on_demand_questions_random", False)
                on_demand_random_config_set.add(random_config)

            # check for conflicts and return randomise config if there are no issues
            random_config_choices = live_random_config_set.union(
                on_demand_random_config_set)
            random_config_choices_excluding_none = random_config_choices.intersection(
                bool_config_choices)
            assert len(
                random_config_choices_excluding_none) <= 1, error_message
            try:
                return random_config_choices_excluding_none.pop()
            except KeyError:  # no config found; default to non-random
                return False

        interview_questions = SurveyDefinition.ddb_load_interview_questions(
            survey_id)

        try:
            survey_modified = interview_questions[0]["survey_modified"]
        except IndexError:
            raise utils.ObjectDoesNotExistError(
                f"No interview questions found for survey {survey_id}",
                details={})

        block_dict = dict()
        for iq in interview_questions:
            block_id = iq["block_id"]

            try:
                block = block_dict[block_id]
            except KeyError:
                block = {
                    "block_id": block_id,
                    "block_name": iq["block_name"],
                    "questions": list(),
                }

            question = {
                "question_id": iq["question_id"],
                "question_name": iq["question_name"],
                "sequence_no": iq["sequence_no"],
                "question_text": iq["question_text"],
                "question_description": iq["question_description"],
            }

            block["questions"].append(question)
            block_dict[block_id] = block

        # order questions in each block
        randomise = randomise_questions_check()
        for block in block_dict.values():
            if randomise:
                random.shuffle(block["questions"])
            else:
                block["questions"] = sorted(
                    block["questions"], key=lambda k: int(k["sequence_no"]))

        body = {
            "survey_id": survey_id,
            "modified": survey_modified,
            "blocks": list(block_dict.values()),
            "count": len(interview_questions),
        }

        return body
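
A usage sketch tying the class together, assuming an EventBridge rule forwards Qualtrics survey-update events to a handler; the handler name and the survey id below are hypothetical, while from_eb_event, ddb_update_interview_questions and get_interview_questions come from the class above:

def update_interview_questions_handler(event, context):
    # Illustrative EventBridge-triggered handler: refresh the question cache
    # in Dynamodb whenever a survey definition changes.
    survey_definition = SurveyDefinition.from_eb_event(event)
    updated_ids, deleted_ids = survey_definition.ddb_update_interview_questions()
    return {
        "updated_question_ids": updated_ids,
        "deleted_question_ids": deleted_ids,
    }


# Reading the questions back for an interview, ordered or randomised according
# to the InterviewTask configuration (the survey id is a placeholder):
questions_body = SurveyDefinition.get_interview_questions("SV_exampleSurveyId")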