def decorated(*args, **kwargs):
    if 'user_name' not in request.args:
        error_response = helpers.format_error_message(
            errors.MissingUserNameInQueryString())
        return make_response(error_response, 400)
    return f(*args, **kwargs)
def decorated(*args, **kwargs):
    limit = request.args.get('limit', None)
    # Cap page size at 20 rows; anything larger is rejected outright.
    if limit is not None and int(limit) > 20:
        error_response = helpers.format_error_message(
            errors.LimitTooLarge(limit))
        return make_response(error_response, 400)
    return f(*args, **kwargs)
def decorated(*args, **kwargs):
    result = make_response('message: Unknown issue', 500)
    try:
        result = f(*args, **kwargs)
    except Exception as e:
        if isinstance(e, errors.BackendDefinedErrors):
            # Errors the backend raised deliberately map to a 400
            message = helpers.format_error_message(e)
            result = make_response(message, 400)
        else:
            # Anything unexpected surfaces as a generic 500
            result = make_response({'message': f"{e}"}, 500)
    return result
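# The `decorated` wrappers above are the inner functions of Flask decorator
# factories. A minimal sketch of the enclosing pattern, with the error
# helpers inlined so it runs standalone; the names `require_user_name`,
# `app`, and `list_users` are illustrative, not from the original source:
import functools

from flask import Flask, make_response, request

app = Flask(__name__)


def require_user_name(f):
    @functools.wraps(f)  # keep f's name so Flask endpoint names stay unique
    def decorated(*args, **kwargs):
        if 'user_name' not in request.args:
            return make_response({'message': 'user_name is required'}, 400)
        return f(*args, **kwargs)

    return decorated


@app.route('/users')
@require_user_name
def list_users():
    return {'user_name': request.args['user_name']}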
def insert_deputy_person_warning(db_config, target_db):
    sirius_details = get_mapping_dict(
        file_name=mapping_file_name,
        stage_name="sirius_details",
        only_complete_fields=False,
    )
    try:
        deputies_query = (
            f'select "id", "c_deputy_no" from {db_config["target_schema"]}.persons '
            f"where \"type\" = 'actor_deputy';")
        deputies_df = pd.read_sql_query(deputies_query,
                                        db_config["db_connection_string"])
        deputy_warning_query = f"""
            select "id", "c_deputy_no" from {db_config["target_schema"]}.warnings
            where casrec_mapping_file_name = 'deputy_violent_warnings_mapping';"""
        deputy_warning_df = pd.read_sql_query(
            deputy_warning_query, db_config["db_connection_string"])
        # Attach each warning to its deputy; the colliding "id" columns
        # are disambiguated by the suffixes.
        deputy_warning_df = deputy_warning_df.merge(
            deputies_df,
            how="left",
            on="c_deputy_no",
            suffixes=("_warning", "_deputy"),
        )
        deputy_warning_df = deputy_warning_df.rename(columns={
            "id_warning": "warning_id",
            "id_deputy": "person_id"
        })
        deputy_warning_df["casrec_details"] = None
        # The left merge leaves NaN person_ids for unmatched rows, which
        # upcasts the column to float; restore the expected dtypes.
        deputy_warning_df = reapply_datatypes_to_fk_cols(
            columns=["person_id", "warning_id"], df=deputy_warning_df)
        target_db.insert_data(
            table_name=definition["destination_table_name"],
            df=deputy_warning_df,
            sirius_details=sirius_details,
        )
    except Exception as e:
        log.debug(
            "No data to insert",
            extra={
                "file_name": "",
                "error": format_error_message(e=e),
            },
        )
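# `reapply_datatypes_to_fk_cols` is called above but not defined in this
# section. A plausible sketch of what it might do: a left merge leaves NaN
# in foreign-key columns for unmatched rows, and pandas silently upcasts
# int64 to float64 when NaN appears, so the columns need recasting before
# insert. The nullable Int64 dtype here is an assumption, not confirmed by
# the original source.
import pandas as pd


def reapply_datatypes_to_fk_cols(columns, df):
    for col in columns:
        # "Int64" (capital I) keeps whole-number values while allowing <NA>
        df[col] = df[col].astype("Int64")
    return df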
def insert_client_person_warning(db_config, target_db):
    sirius_details = get_mapping_dict(
        file_name=mapping_file_name,
        stage_name="sirius_details",
        only_complete_fields=False,
    )
    try:
        clients_query = (
            f'select "id", "caserecnumber" from {db_config["target_schema"]}.persons '
            f"where \"type\" = 'actor_client';")
        clients_df = pd.read_sql_query(clients_query,
                                       db_config["db_connection_string"])
        client_warning_query = (
            f'select "id", "c_case" from {db_config["target_schema"]}.warnings;'
        )
        client_warning_df = pd.read_sql_query(
            client_warning_query, db_config["db_connection_string"])
        # Attach each warning to its client via the case reference; the
        # colliding "id" columns are disambiguated by the suffixes.
        client_warning_df = client_warning_df.merge(
            clients_df,
            how="left",
            left_on="c_case",
            right_on="caserecnumber",
            suffixes=("_warning", "_client"),
        )
        client_warning_df = client_warning_df.drop(columns=["caserecnumber"])
        client_warning_df = client_warning_df.rename(columns={
            "id_warning": "warning_id",
            "id_client": "person_id"
        })
        # Note: person_id may contain NaN for warnings with no matching client.
        client_warning_df["casrec_details"] = None
        target_db.insert_data(
            table_name=definition["destination_table_name"],
            df=client_warning_df,
            sirius_details=sirius_details,
        )
    except Exception as e:
        log.debug(
            "No data to insert",
            extra={
                "file_name": "",
                "error": format_error_message(e=e),
            },
        )
def decorated(*args, **kwargs):
    limit = request.args.get('limit', None)
    offset = request.args.get('offset', None)
    try:
        if limit is not None or offset is not None:
            for param in params:
                value = request.args.get(param)
                if value is None:
                    continue  # param not supplied, nothing to validate
                # int() raises ValueError for non-integer strings such as
                # 'abc' or '2.5', so no separate float check is needed.
                param_as_int = int(value)
                if param_as_int < 0:
                    raise ValueError(f"{param} must not be negative")
                if param_as_int < 1 and param == 'limit':
                    raise ValueError("limit must be at least 1")
    except ValueError:
        error_response = helpers.format_error_message(
            errors.ParamMustBeInteger())
        return make_response(error_response, 400)
    return f(*args, **kwargs)
def amend_dev_data(db_engine):
    log.info(
        "Amending Dev Sirius DB to match preprod - this should NOT run on preprod!"
    )
    dirname = os.path.dirname(__file__)
    filename = "dev_data_fixes.sql"
    with open(os.path.join(dirname, filename)) as sql_file:
        sql = text(sql_file.read())
    try:
        db_engine.execute(sql)
    except Exception as e:
        log.error(
            "Unable to amend Sirius DB: data probably already exists",
            extra={
                "file_name": "",
                "error": format_error_message(e=e),
            },
        )
def decorated(*args, **kwargs):
    if request.method not in allowed_methods:
        new_error = errors.EndpointMethodNotAllowed()
        error_message = helpers.format_error_message(new_error)
        return make_response(error_message, 405)
    return f(*args, **kwargs)
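# `allowed_methods` in the wrapper above comes from an outer closure: the
# wrapper sits one level deeper, inside a decorator factory that takes
# arguments. A minimal sketch of that two-level pattern, assuming the same
# Flask context; `restrict_methods` is an illustrative name:
import functools

from flask import make_response, request


def restrict_methods(allowed_methods):
    def decorator(f):
        @functools.wraps(f)
        def decorated(*args, **kwargs):
            if request.method not in allowed_methods:
                return make_response({'message': 'method not allowed'}, 405)
            return f(*args, **kwargs)

        return decorated

    return decorator


# Usage: place @restrict_methods(['GET', 'HEAD']) above a route function.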
def insert_data_into_target(db_config, source_db_engine, target_db_engine,
                            table_name, table_details):
    log.info(
        f"Inserting new data from {db_config['source_schema']} '{table_name}' table"
    )
    get_cols_query = get_columns_query(table=table_name,
                                       schema=db_config["source_schema"])
    columns = [
        x[0] for x in source_db_engine.execute(get_cols_query).fetchall()
    ]
    columns = remove_unecessary_columns(columns=columns)
    log.verbose(f"columns: {columns}")
    order_by = (", ".join(table_details["order_by"])
                if len(table_details["order_by"]) > 0 else
                table_details["pk"])
    log.verbose(f"order_by: {order_by}")
    chunk_size = 10000
    offset = 0
    while True:
        query = f"""
            SELECT {', '.join(columns)}
            FROM {db_config["source_schema"]}.{table_name}
            WHERE method = 'INSERT'
            ORDER BY {order_by}
            LIMIT {chunk_size} OFFSET {offset};
        """
        log.verbose(f"using source query {query}")
        data_to_insert = pd.read_sql_query(
            sql=query, con=db_config["source_db_connection_string"])
        if data_to_insert.empty:
            # Nothing left to read: avoids emitting an empty INSERT when the
            # row count is an exact multiple of chunk_size.
            break
        insert_statement = create_insert_statement(
            schema=db_config["target_schema"],
            table_name=table_name,
            columns=columns,
            df=data_to_insert,
        )
        try:
            target_db_engine.execute(insert_statement)
        except Exception as e:
            log.error(
                f"There was an error inserting {len(data_to_insert)} rows "
                f"into {db_config['target_schema']}.{table_name}",
                extra={
                    "table_name": table_name,
                    "size": len(data_to_insert),
                    "action": "insert",
                    "error": format_error_message(e=e),
                },
            )
            os._exit(1)
        offset += chunk_size
        log.debug(f"doing offset {offset} for table {table_name}")
        if len(data_to_insert) < chunk_size:
            break
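# `create_insert_statement` is referenced above without a definition. A
# hypothetical sketch that renders a DataFrame chunk as one multi-row
# INSERT, mirroring the string-built SQL style used above; production code
# would normally prefer parameterized queries or pandas' to_sql. Quoting
# every value and letting Postgres cast the literals is an assumption:
import pandas as pd


def create_insert_statement(schema, table_name, columns, df):
    rows = []
    for _, row in df.iterrows():
        values = ", ".join(
            "NULL" if pd.isna(row[col])
            else "'" + str(row[col]).replace("'", "''") + "'"
            for col in columns
        )
        rows.append(f"({values})")
    return (
        f'INSERT INTO {schema}.{table_name} ({", ".join(columns)}) '
        f'VALUES {", ".join(rows)};'
    )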