Example no. 1
0
def execute_non_query(sql, params, correlation_id=None):
    """
    Execute a single data-modifying SQL statement (e.g. UPDATE, DELETE) and
    commit the change to the database.

    Args:
        sql (str): SQL statement to execute
        params (tuple or list): parameters for the SQL statement
        correlation_id: id used to correlate log entries; a new one is
            generated when not supplied

    Returns:
        Number of rows affected by the statement (cursor.rowcount)

    Raises:
        DetailedIntegrityError: if the statement violates a database
            integrity constraint
    """
    # BUG FIX: the old default (correlation_id=new_correlation_id()) was
    # evaluated once at import time, so every defaulted call shared one id.
    # Generate it per call instead.
    if correlation_id is None:
        correlation_id = new_correlation_id()
    logger = get_logger()
    conn = _get_connection(correlation_id)
    sql = minimise_white_space(sql)
    param_str = str(params)
    logger.info(
        "postgres query",
        extra={
            "query": sql,
            "parameters": param_str,
            "correlation_id": correlation_id
        },
    )
    with conn.cursor() as cursor:
        try:
            cursor.execute(sql, params)
            rowcount = cursor.rowcount
            conn.commit()
        except psycopg2.IntegrityError as err:
            errorjson = {
                "error": err.args[0],
                "correlation_id": str(correlation_id)
            }
            # chain the original error so the root cause is preserved
            raise DetailedIntegrityError("Database integrity error",
                                         errorjson) from err
    return rowcount
Example no. 2
0
 def __init__(self, survey_consent_event):
     """
     Parse a survey consent event into consent-related attributes.

     Args:
         survey_consent_event (dict): event whose "body" is a JSON string
             holding the consent data, including "consent_info_url" and a
             JSON-encoded "consent_statements" field. May also carry
             "logger" and "correlation_id" keys.
     """
     self.logger = survey_consent_event.get("logger", utils.get_logger())
     self.correlation_id = survey_consent_event.get(
         "correlation_id", utils.new_correlation_id())
     self.consent_dict = json.loads(survey_consent_event["body"])
     # consent_info_url is consumed here and must not reach the Consent
     # entity; pop (no default) raises KeyError if absent, as before
     self.consent_info_url = self.consent_dict.pop("consent_info_url")
     # consent_statements arrives double-encoded, so decode it a second time
     consent_embedded_data_fieldname = "consent_statements"
     self.consent_dict[consent_embedded_data_fieldname] = json.loads(
         self.consent_dict[consent_embedded_data_fieldname])
     self.to_recipient_email = self.consent_dict.get("to_recipient_email")
     # template_name is optional; remove it from the dict when present
     # (replaces the previous try/except KeyError/else-del dance)
     self.template_name = self.consent_dict.pop(
         "template_name", DEFAULT_CONSENT_EMAIL_TEMPLATE)
     try:
         self.consent_dict[
             "consent_datetime"] = qualtrics2thiscovery_timestamp(
                 self.consent_dict["consent_datetime"])
     except KeyError:
         # no datetime supplied; default to the current timezone-aware time
         self.consent_dict["consent_datetime"] = str(utils.now_with_tz())
     self.core_api_client = CoreApiClient(
         correlation_id=self.correlation_id)
     self.consent = Consent(core_api_client=self.core_api_client,
                            correlation_id=self.correlation_id)
     self.consent.from_dict(consent_dict=self.consent_dict)
Example no. 3
0
 def views_interview_rescheduled_event_from_acuity_info(acuity_info):
     """
     Build an EventBridge-style "interview_rescheduled" event from an Acuity
     appointment info dict. Account, region, room details and project_task_id
     are fixed fixture values; event/interview ids are fresh uuids.
     """
     form_values = acuity_info["forms"][0]["values"]
     detail = {
         "anon_project_specific_user_id": form_values[1]["value"],
         "anon_user_task_id": form_values[0]["value"],
         "appointment_datetime": acuity_info["datetime"],
         "appointment_duration": acuity_info["duration"],
         "appointment_id": acuity_info["id"],
         "appointment_timezone": acuity_info["timezone"],
         "appointment_type": acuity_info["type"],
         "appointment_type_id": acuity_info["appointmentTypeID"],
         "calendar_id": acuity_info["calendarID"],
         "calendar_name": acuity_info["calendar"],
         "calendar_timezone": acuity_info["calendarTimezone"],
         "interview_id": str(utils.new_correlation_id()),
         "interview_room_url": "https://www.video.thiscovery.org/",
         "project_task_id": "683598e8-435f-4052-a417-f0f6d808373a",
         "room_phone_number": "+44 (0)1111 111 111",
         "room_phone_pin": "11111111",
     }
     return {
         "version": "0",
         "id": str(utils.new_correlation_id()),
         "detail-type": "interview_rescheduled",
         "detail": detail,
         "source": "thiscovery_video",
         "account": "REDACTED",
         "region": "REDACTED",
         "resources": [],
         "time": "2021-02-12T10:57:09Z",
     }
Example no. 4
0
def execute_non_query_multiple(sql_iterable,
                               params_iterable,
                               correlation_id=None):
    """
    Execute several data-modifying SQL statements and commit them together.

    Args:
        sql_iterable (tuple, list, etc): iterable containing sql queries to be executed
        params_iterable (tuple, list, etc): iterable containing params for sql queries in sql_iterable
        correlation_id: id used to correlate log entries; a new one is
            generated when not supplied

    Returns:
        List of number of rows affected by each of the input sql queries

    Raises:
        DetailedIntegrityError: if any statement violates a database
            integrity constraint; nothing is committed in that case

    """
    # BUG FIX: generate the default per call, not once at import time
    if correlation_id is None:
        correlation_id = new_correlation_id()
    logger = get_logger()
    conn = _get_connection(correlation_id)
    results = []
    with conn.cursor() as cursor:
        for (sql, params) in zip(sql_iterable, params_iterable):
            sql = minimise_white_space(sql)
            param_str = str(params)
            logger.info(
                "postgres query",
                extra={
                    "query": sql,
                    "parameters": param_str,
                    "correlation_id": correlation_id,
                },
            )

            try:
                cursor.execute(sql, params)
            except psycopg2.IntegrityError as err:
                errorjson = {
                    "error": err.args[0],
                    "correlation_id": str(correlation_id),
                }
                # chain the original error so the root cause is preserved;
                # the old bare `except Exception: raise ex` clause was a
                # no-op and has been removed
                raise DetailedIntegrityError("Database integrity error",
                                             errorjson) from err

            rowcount = cursor.rowcount
            logger.info(
                f"postgres query updated {rowcount} rows",
                extra={
                    "query": sql,
                    "parameters": param_str,
                    "correlation_id": correlation_id,
                },
            )
            results.append(rowcount)
    # commit only after every statement has succeeded
    conn.commit()
    return results
Example no. 5
0
def execute_query(
        base_sql,
        params=None,
        correlation_id=None,
        return_json=True,
        jsonize_sql=True,
):
    """
    Use this method to query the database (e.g. using SELECT). Changes will not be committed to the database, so don't use this method for UPDATE and DELETE
    calls.

    Args:
        base_sql: SQL query to execute
        params (tuple or list): http://initd.org/psycopg/docs/usage.html#passing-parameters-to-sql-queries
        correlation_id: id used to correlate log entries; a new one is
            generated when not supplied
        return_json: if True, return the result as json
        jsonize_sql: if True (and return_json is True), wrap base_sql so the
            database itself produces json

    Returns:
        Query results as json (default) or as a list of tuples

    """
    # BUG FIX: the old default (correlation_id=new_correlation_id()) was
    # evaluated once at import time; generate it per call instead
    if correlation_id is None:
        correlation_id = new_correlation_id()
    logger = get_logger()
    # tell sql to create json if that's what's wanted
    if return_json and jsonize_sql:
        sql = _jsonize_sql(base_sql)
    else:
        sql = base_sql
    sql = minimise_white_space(sql)
    param_str = str(params)
    logger.info(
        "postgres query",
        extra={
            "query": sql,
            "parameters": param_str,
            "correlation_id": correlation_id
        },
    )
    conn = _get_connection(correlation_id)
    with conn.cursor() as cursor:
        cursor.execute(sql, params)
        records = cursor.fetchall()
    logger.info(
        "postgres result",
        extra={
            "rows returned": str(len(records)),
            "correlation_id": correlation_id
        },
    )

    if return_json:
        return _get_json_from_tuples(records)
    else:
        return records
Example no. 6
0
def execute_query_multiple(
        base_sql_tuple,
        params_tuple=None,
        correlation_id=None,
        return_json=True,
        jsonize_sql=True,
):
    """
    Use this method to query the database (e.g. using SELECT). Changes will not be committed to the database, so don't use this method for UPDATE and DELETE
    calls.

    Args:
        base_sql_tuple: iterable of SQL queries to execute
        params_tuple: iterable of params, one entry per query in
            base_sql_tuple; defaults to all-None
        correlation_id: id used to correlate log entries; a new one is
            generated when not supplied
        return_json: if True, return each result as json
        jsonize_sql: if True (and return_json is True), wrap each query so
            the database itself produces json

    Returns:
        List of per-query results (json or list of tuples)
    """
    # BUG FIX: generate the default per call, not once at import time
    if correlation_id is None:
        correlation_id = new_correlation_id()
    logger = get_logger()
    conn = _get_connection(correlation_id)
    if params_tuple is None:
        params_tuple = tuple([None] * len(base_sql_tuple))
    results = []
    with conn.cursor() as cursor:
        for (base_sql, params) in zip(base_sql_tuple, params_tuple):
            # tell sql to create json if that's what's wanted
            if return_json and jsonize_sql:
                sql = _jsonize_sql(base_sql)
            else:
                sql = base_sql
            sql = minimise_white_space(sql)
            param_str = str(params)
            logger.info(
                "postgres query",
                extra={
                    "query": sql,
                    "parameters": param_str,
                    "correlation_id": correlation_id,
                },
            )

            cursor.execute(sql, params)
            records = cursor.fetchall()
            logger.info(
                "postgres result",
                extra={
                    "rows returned": str(len(records)),
                    "correlation_id": correlation_id,
                },
            )

            if return_json:
                results.append(_get_json_from_tuples(records))
            else:
                results.append(records)
    logger.info("Returning multiple results", extra={"results": results})
    return results
Example no. 7
0
def execute_jsonpatch(
        id_column,
        id_to_update,
        mappings,
        patch_json,
        modified_time,
        correlation_id=None,
):
    """
    Apply a jsonpatch document to the RDS database.

    Args:
        id_column: name of the id column used to select the row(s) to update
        id_to_update: id value of the row(s) to update
        mappings: attribute-to-table/column mapping dict
        patch_json: jsonpatch document to apply
        modified_time: timestamp written by the generated update statements
        correlation_id: id used to correlate log entries; a new one is
            generated when not supplied

    Returns:
        Total number of rows updated in RDS database

    Raises:
        ObjectDoesNotExistError: if any generated update affects zero rows

    """
    # todo - wrap in transaction if ever extended to multi table updates
    # BUG FIX: generate the default per call, not once at import time
    if correlation_id is None:
        correlation_id = new_correlation_id()
    updated_rows = 0
    # NOTE: the previous `try/except Exception as ex: raise ex` wrapper was a
    # no-op (exceptions propagate to the caller either way) and was removed
    tables_to_update, columns_to_update = create_updates_list_from_jsonpatch(
        mappings, patch_json, correlation_id)
    sql_updates = create_sql_from_updates_list(tables_to_update,
                                               columns_to_update, id_column,
                                               id_to_update, modified_time)
    for (sql_update, params) in sql_updates:
        rowcount = execute_non_query(sql_update, params, correlation_id)
        if rowcount == 0:
            errorjson = {
                "id_column": id_column,
                "id_to_update": id_to_update,
                "sql_update": sql_update,
                "correlation_id": str(correlation_id),
            }
            raise ObjectDoesNotExistError("user does not exist", errorjson)
        updated_rows += rowcount
    return updated_rows
Example no. 8
0
    def __init__(
        self,
        survey_id,
        anon_project_specific_user_id,
        account,
        project_task_id,
        correlation_id=None,
    ):
        """
        Store survey/user identifiers and create a Dynamodb client.

        A fresh correlation id is generated when none is supplied.
        """
        self.survey_id = survey_id
        self.anon_project_specific_user_id = anon_project_specific_user_id
        self.account = account
        self.account_survey_id = f"{account}_{survey_id}"
        self.project_task_id = project_task_id
        if correlation_id is None:
            correlation_id = utils.new_correlation_id()
        self.correlation_id = correlation_id
        self.ddb_client = Dynamodb(stack_name=const.STACK_NAME,
                                   correlation_id=self.correlation_id)
Example no. 9
0
def process_user_login(notification):
    """
    Post a user login notification to the HubSpot CRM and, on success, mark
    the notification as processed; on any failure, mark it as failed.

    Returns:
        Tuple (posting_result, marking_result); either element may be None
        when the corresponding step did not run.
    """
    logger = get_logger()
    correlation_id = new_correlation_id()
    logger.info(
        "Processing user login notification",
        extra={
            "notification": notification,
            "correlation_id": correlation_id
        },
    )
    posting_result = None
    marking_result = None
    try:
        # get basic data out of notification
        login_details = notification["details"]
        hs_client = HubSpotClient(correlation_id=correlation_id,
                                  stack_name=const.STACK_NAME)
        posting_result = hs_client.post_user_login_to_crm(login_details)
        logger.debug(
            "Response from HubSpot API",
            extra={
                "posting_result": posting_result,
                "correlation_id": correlation_id
            },
        )
        if posting_result == http.HTTPStatus.NO_CONTENT:
            marking_result = mark_notification_processed(
                notification, correlation_id, stack_name=const.STACK_NAME)
    except Exception as ex:
        logger.debug("Traceback", extra={"traceback": traceback.format_exc()})
        marking_result = mark_notification_failure(notification, str(ex),
                                                   correlation_id,
                                                   stack_name=const.STACK_NAME)
    finally:
        # NOTE(review): `return` inside `finally` suppresses any exception
        # raised in the except handler — kept to preserve existing behaviour
        return posting_result, marking_result
Example no. 10
0
def create_updates_list_from_jsonpatch(mappings,
                                       jsonpatch,
                                       correlation_id=None):
    """
    Translate a jsonpatch document into database update instructions.

    Args:
        mappings (dict): maps patch attribute names to dicts carrying
            "table_name" and "column_name" keys
        jsonpatch (list): jsonpatch operations; only "replace" is supported
        correlation_id: id used to correlate error payloads; a new one is
            generated when not supplied

    Returns:
        Tuple (tables_to_update, columns_to_update): a set of table names and
        a list of {"table_name", "column_name", "value"} dicts

    Raises:
        PatchInvalidJsonError: if an operation lacks "path", "op" or "value"
        PatchAttributeNotRecognisedError: if an attribute has no mapping
        PatchOperationNotSupportedError: if an operation is not "replace"
    """
    # BUG FIX: generate the default per call, not once at import time
    if correlation_id is None:
        correlation_id = new_correlation_id()
    tables_to_update = set()
    columns_to_update = []
    # process each attribute update and figure out where it belongs in database
    for update in jsonpatch:
        try:
            path = update["path"]
        except KeyError as err:
            errorjson = {
                "update": update,
                "correlation_id": str(correlation_id)
            }
            raise PatchInvalidJsonError("path not found in jsonpatch",
                                        errorjson) from err
        # remove leading '/' if present in jsonpatch (startswith also copes
        # with an empty path, which previously raised IndexError)
        attribute = path[1:] if path.startswith("/") else path

        # get mapping for this particular attribute
        if attribute not in mappings:
            errorjson = {
                "attribute": attribute,
                "correlation_id": str(correlation_id)
            }
            raise PatchAttributeNotRecognisedError(
                "Patch attribute not recognised", errorjson)

        mapping = mappings[attribute]

        try:
            operation = update["op"]
        except KeyError as err:
            errorjson = {
                "update": update,
                "correlation_id": str(correlation_id)
            }
            raise PatchInvalidJsonError("op not found in jsonpatch",
                                        errorjson) from err

        if operation != "replace":
            errorjson = {
                "operation": operation,
                "correlation_id": str(correlation_id)
            }
            raise PatchOperationNotSupportedError(
                "Patch operation not currently supported", errorjson)

        try:
            value = update["value"]
        except KeyError as err:
            errorjson = {
                "update": update,
                "correlation_id": str(correlation_id)
            }
            raise PatchInvalidJsonError("value not found in jsonpatch",
                                        errorjson) from err

        # add table to list that need to be updated
        table_name = mapping["table_name"]
        tables_to_update.add(table_name)

        # add column to list of cols that need to be updated
        columns_to_update.append({
            "table_name": table_name,
            "column_name": mapping["column_name"],
            "value": value
        })

    return tables_to_update, columns_to_update
Example no. 11
0
def run_sql_script_file(sql_script_file, correlation_id=None):
    """
    Read *sql_script_file* and execute its contents as a single
    non-query (data-modifying) statement.

    Args:
        sql_script_file: path of the SQL script to run
        correlation_id: id used to correlate log entries; a new one is
            generated when not supplied
    """
    # BUG FIX: generate the default per call, not once at import time
    if correlation_id is None:
        correlation_id = new_correlation_id()
    sql = get_file_as_string(sql_script_file)
    execute_non_query(sql, None, correlation_id)
Example no. 12
0
    def test_20_patch_user_api_ok(self):
        """
        Patch every mutable user attribute via the API, then verify:
        the HTTP status, the persisted user record, and the entity-update
        audit record (forward and reverse jsonpatches).
        """
        user_id = "d1070e81-557e-40eb-a7ba-b951ddb7ebdc"

        expected_status = HTTPStatus.NO_CONTENT
        # forward patch: one "replace" per mutable attribute
        user_jsonpatch = [
            {"op": "replace", "path": "/title", "value": "Sir"},
            {"op": "replace", "path": "/first_name", "value": "simon"},
            {"op": "replace", "path": "/last_name", "value": "smith"},
            {"op": "replace", "path": "/email", "value": "*****@*****.**"},
            {"op": "replace", "path": "/auth0_id", "value": "new-auth0-id"},
            {"op": "replace", "path": "/status", "value": "singing"},
            {"op": "replace", "path": "/country_code", "value": "GB-SCT"},
        ]
        body = json.dumps(user_jsonpatch)
        path_parameters = {"id": user_id}

        result = test_patch(
            patch_user_api,
            ENTITY_BASE_URL,
            path_parameters=path_parameters,
            request_body=body,
        )
        result_status = result["statusCode"]

        self.assertEqual(expected_status, result_status)
        # now check database values...
        path_parameters = {"id": user_id}

        expected_body = {
            "id": user_id,
            "created": f"2018-08-17T{tz_hour}:10:56.798192+{tz_offset}",
            "email": "*****@*****.**",
            "title": "Sir",
            "first_name": "simon",
            "last_name": "smith",
            "auth0_id": "new-auth0-id",
            "country_code": "GB-SCT",
            "country_name": "United Kingdom - Scotland",
            "crm_id": None,
            "avatar_string": "ss",
            "status": "singing",
        }

        result = test_get(
            u.get_user_by_id_api, ENTITY_BASE_URL, path_parameters=path_parameters
        )
        result_json = json.loads(result["body"])

        # will test modified separately so extract it from dictionary here
        self.now_datetime_test_and_remove(
            result_json, "modified", tolerance=TIME_TOLERANCE_SECONDS
        )

        # now check that we have a corresponding entity update record
        entity_updates = EntityUpdate.get_entity_updates_for_entity(
            "user", user_id, new_correlation_id()
        )
        self.assertTrue(len(entity_updates) > 0, "No entity update record found")
        if len(entity_updates) > 0:
            # get most recent update record
            last_entity_update = entity_updates[-1]

            # remove from returned value those things we don't want to test
            self.remove_dict_items_to_be_ignored_by_tests(
                last_entity_update, ["id", "modified"]
            )

            # remove and store data items to be tested individually
            # check created datetime - allow up to TIME_TOLERANCE_SECONDS difference
            self.now_datetime_test_and_remove(
                last_entity_update, "created", tolerance=TIME_TOLERANCE_SECONDS
            )

            result_json_reverse_patch = last_entity_update["json_reverse_patch"]
            del last_entity_update["json_reverse_patch"]
            result_json_patch = last_entity_update["json_patch"]
            del last_entity_update["json_patch"]

            # check jsonpatch - compare as lists in case order different
            result_json_patch = json.loads(result_json_patch)
            self.assertCountEqual(user_jsonpatch, result_json_patch)

            # need to compare list objects not strings as elements may be in different order
            result_json_reverse_patch = json.loads(result_json_reverse_patch)
            # reverse patch should restore the pre-patch attribute values
            expected_json_reverse_patch = [
                {"op": "replace", "path": "/first_name", "value": "Altha"},
                {"op": "replace", "path": "/auth0_id", "value": None},
                {"op": "replace", "path": "/title", "value": "Mrs"},
                {"op": "replace", "path": "/last_name", "value": "Alcorn"},
                {"op": "replace", "path": "/status", "value": None},
                {"op": "replace", "path": "/email", "value": "*****@*****.**"},
                {"op": "replace", "path": "/country_code", "value": "GB"},
            ]
            self.assertCountEqual(
                expected_json_reverse_patch, result_json_reverse_patch
            )

            # and finally check what's left
            expected_body = {
                "entity_name": "user",
                "entity_id": user_id,
            }
            self.assertDictEqual(expected_body, last_entity_update)