def downgrade():
    """Alembic downgrade for the PPR registry schema.

    Restores the previous definitions of the ``searchkey_business_name`` and
    ``match_individual_name`` SQL functions, then drops the indexes and
    columns that the corresponding upgrade() added.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Previous business-name search key: a deep REGEXP_REPLACE pipeline that
    # strips leading zeros / "THE" / parenthesised text and corporate
    # suffixes, spells out digits and tens, then removes non-alphanumerics.
    # NOTE(review): 'THIRTY' -> 'THREEERO' looks like a typo in the
    # historical definition; it is restored verbatim on purpose.
    public_searchkey_business_name = PGFunction(
        schema="public",
        signature="searchkey_business_name(actual_name character varying)",
        definition="returns character varying\n LANGUAGE plpgsql\nAS $function$\n DECLARE\n v_search_key VARCHAR(150);\n BEGIN\n v_search_key :=\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(\n REGEXP_REPLACE(actual_name,'^0000|^000|^00|^0|^THE | THE |\\([^()]*\\)',''),\n 'CORPORATION|CORP|COMPANY|CO\\.|LTD|INCORPORATED|INC$|INC.|INCORPOREE|LIMITED|LIMITEE|LTEE|LTD|ASSOCIATION$|ASSOC$|ASSN$|NON PERSONAL LIABILITY$|UNLIMITED LIABILITY COMPANY|N P L$|NPL$|PARTNERSHIP|SOCIETY$|SOC$',''),\n 'BRITISH COLUMBIA|BRITISHCOLUMBIA','BC'),\n '&','AND'),\n '#','NUMBER'),\n '1','ONE'),\n '2','TWO'),\n '3','THREE'),\n '4','FOUR'),\n '5','FIVE'),\n '6','SIX'),\n '7','SEVEN'),\n '8','EIGHT'),\n '9','NINE'),\n '0','ZERO'),\n 'TEN','ONEZERO'),\n 'TWENTY','TWOZERO'),\n 'THIRTY','THREEERO'),\n 'FORTY','FOURZERO'),\n 'FOURTY','FOURZERO'),\n 'FIFTY','FIVEZERO'),\n 'SIXTY','SIXZERO'),\n 'SEVENTY','SEVENZERO'),\n 'EIGHTY','EIGHTZERO'),\n 'NINETY','NINEZERO'),\n '[^0-9A-Za-z]','','gi');\n RETURN v_search_key;\n END\n; \n$function$"
    )
    op.replace_entity(public_searchkey_business_name)
    # Previous individual-name matcher: splits the last/first name keys into
    # parts, then combines pg_trgm '%' similarity (threshold 0.8 set
    # in-session) with exact key-part and nickname matches against active
    # 'DI' parties.
    public_match_individual_name = PGFunction(
        schema="public",
        signature="match_individual_name(lastname character varying, firstname character varying)",
        definition="returns integer[]\n LANGUAGE plpgsql\nAS $function$\n DECLARE\n v_ids integer ARRAY;\n v_lastname_key VARCHAR(50);\n v_last1 VARCHAR(50);\n v_last2 VARCHAR(50);\n v_firstname_key VARCHAR(50);\n v_first1 VARCHAR(50);\n v_first2 VARCHAR(50);\n v_first3 VARCHAR(50);\n BEGIN\n v_lastname_key := searchkey_last_name(lastname);\n v_last1 = split_part(v_lastname_key, ' ', 1);\n v_last2 = split_part(v_lastname_key, ' ', 2); -- May be null\n v_firstname_key := searchkey_first_name(firstname);\n v_first1 = split_part(v_firstname_key, ' ', 1);\n v_first2 = split_part(v_firstname_key, ' ', 2); -- May be null\n v_first3 = split_part(v_firstname_key, ' ', 3); -- May be null\n\n IF (LENGTH(v_last2) < 1) THEN\n v_last2 := null;\n END IF;\n IF (LENGTH(v_first2) < 1) THEN\n v_first2 := null;\n END IF;\n IF (LENGTH(v_first3) < 1) THEN\n v_first3 := null;\n END IF;\n SET pg_trgm.similarity_threshold = 0.8;\n\n -- Replace where clause\\: Oracle uses UTL_MATCH.JARO_WINKLER_SIMILARITY\n SELECT array_agg(id)\n INTO v_ids\n FROM parties p\n WHERE registration_id_end IS NULL AND\n party_type = 'DI' AND\n (\n (p.last_name_key % v_lastname_key OR\n ((p.last_name_key1 = v_last1 OR (v_last2 IS NOT NULL AND v_last2 = p.last_name_key1)) OR\n (p.last_name_key2 IS NOT NULL AND (p.last_name_key2 = v_last1 OR (v_last2 IS NOT NULL AND v_last2 = p.last_name_key2)))\n )\n ) AND\n (\n p.first_name_key % v_firstname_key OR\n (p.first_name_key1 = v_first1 OR\n (v_first2 IS NOT NULL AND v_first2 = p.first_name_key1) OR\n (v_first3 IS NOT NULL AND v_first3 = p.first_name_key1)) OR\n (p.first_name_key2 IS NOT NULL AND \n (p.first_name_key2 = v_first1 OR \n (v_first2 IS NOT NULL AND v_first2 = p.first_name_key2) OR\n (v_first3 IS NOT NULL AND v_first3 = p.first_name_key2))) OR\n (p.first_name_key3 IS NOT NULL AND \n (p.first_name_key3 = v_first1 OR \n (v_first2 IS NOT NULL AND v_first2 = p.first_name_key3) OR\n (v_first3 IS NOT NULL AND v_first3 = p.first_name_key3))) OR\n (searchkey_nickname_match2(p.first_name_key1, p.first_name_key2, p.first_name_key3, v_first1, v_first2, v_first3) > 0)\n )\n );\n RETURN v_ids;\n END\n; \n$function$"
    )
    op.replace_entity(public_match_individual_name)
    # Drop the indexes and columns introduced by the matching upgrade().
    op.drop_index(op.f('ix_trust_indentures_registration_id_end'), table_name='trust_indentures')
    op.drop_index(op.f('ix_trust_indentures_registration_id'), table_name='trust_indentures')
    op.drop_index(op.f('ix_trust_indentures_financing_id'), table_name='trust_indentures')
    op.drop_index(op.f('ix_serial_collateral_registration_id_end'), table_name='serial_collateral')
    op.drop_index(op.f('ix_serial_collateral_registration_id'), table_name='serial_collateral')
    op.drop_index(op.f('ix_serial_collateral_mhr_number'), table_name='serial_collateral')
    op.drop_index(op.f('ix_serial_collateral_financing_id'), table_name='serial_collateral')
    op.drop_column('search_requests', 'user_id')
    op.drop_index(op.f('ix_registrations_financing_id'), table_name='registrations')
    op.drop_index(op.f('ix_registrations_draft_id'), table_name='registrations')
    op.drop_index(op.f('ix_parties_registration_id_end'), table_name='parties')
    op.drop_index(op.f('ix_parties_registration_id'), table_name='parties')
    op.drop_index(op.f('ix_parties_middle_initial'), table_name='parties')
    op.drop_index(op.f('ix_parties_financing_id'), table_name='parties')
    op.drop_index(op.f('ix_parties_branch_id'), table_name='parties')
    op.drop_index(op.f('ix_parties_address_id'), table_name='parties')
    op.drop_column('parties', 'email_address')
    op.drop_index(op.f('ix_general_collateral_registration_id_end'), table_name='general_collateral')
    op.drop_index(op.f('ix_general_collateral_registration_id'), table_name='general_collateral')
    op.drop_index(op.f('ix_general_collateral_financing_id'), table_name='general_collateral')
    op.drop_index(op.f('ix_court_orders_registration_id'), table_name='court_orders')
    op.drop_index(op.f('ix_client_codes_historical_users_id'), table_name='client_codes_historical')
    op.drop_index(op.f('ix_client_codes_historical_branch_id'), table_name='client_codes_historical')
    op.drop_index(op.f('ix_client_codes_historical_address_id'), table_name='client_codes_historical')
    op.drop_index(op.f('ix_client_codes_users_id'), table_name='client_codes')
    op.drop_index(op.f('ix_client_codes_address_id'), table_name='client_codes')
def upgrade():
    """Alembic upgrade: add drafts.user_id, switch ``match_individual_name``
    to a pg_trgm word-similarity implementation, and create the
    previous_financing_statements table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('drafts', sa.Column('user_id', sa.String(length=1000), nullable=True))
    # New matcher: word_similarity against the stored name keys (session
    # threshold 0.4) plus a nickname lookup, restricted to active 'DI'
    # parties.
    public_match_individual_name = PGFunction(
        schema="public",
        signature="match_individual_name(lastname IN VARCHAR, firstname IN VARCHAR)",
        definition="RETURNS int[]\n LANGUAGE plpgsql\n AS\n $$\n DECLARE\n v_ids integer ARRAY;\n BEGIN\n SET pg_trgm.word_similarity_threshold = 0.4;\n \n SELECT array_agg(p.id)\n INTO v_ids\n FROM parties p\n WHERE p.registration_id_end IS NULL\n AND p.party_type = 'DI'\n AND lastname <% p.last_name_key\n AND ((firstname <% p.first_name_key AND word_similarity(firstname, p.first_name_key) >= .50) OR\n (firstname <% p.middle_initial AND word_similarity(firstname, p.middle_initial) >= .50) OR\n p.first_name_key IN (SELECT n.name \n FROM nicknames n \n WHERE n.name_id IN (SELECT n2.name_id\n FROM nicknames n2\n WHERE n2.name = firstname))); \n RETURN v_ids;\n END\n ; \n $$"
    )
    op.replace_entity(public_match_individual_name)
    # Snapshot table keyed 1:1 to financing_statements.
    op.create_table(
        'previous_financing_statements',
        sa.Column('financing_id', sa.Integer(), nullable=False),
        sa.Column('registration_type', sa.String(length=30), nullable=False),
        sa.Column('cb_date', sa.DateTime(), nullable=True),
        sa.Column('cb_number', sa.String(length=10), nullable=True),
        sa.Column('cr_date', sa.DateTime(), nullable=True),
        sa.Column('cr_number', sa.String(length=10), nullable=True),
        sa.Column('mhr_date', sa.DateTime(), nullable=True),
        sa.Column('mhr_number', sa.String(length=10), nullable=True),
        sa.ForeignKeyConstraint(['financing_id'], ['financing_statements.id'], ),
        sa.PrimaryKeyConstraint('financing_id'))
def sql_function_entities():
    """Load a PGFunction entity from every ``*.sql`` file under
    ``src/db/sql/functions`` (searched recursively, relative to the current
    working directory).

    Returns:
        list: one ``PGFunction`` per SQL file found.
    """
    # pathlib '/' join replaces the original string concatenation; the
    # comprehension replaces the manual append loop (which also had a
    # misspelled local, 'pg_function_entitity').
    functions_dir = Path().resolve() / "src" / "db" / "sql" / "functions"
    return [PGFunction.from_sql(sql_file.read_text()) for sql_file in functions_dir.rglob('*.sql')]
def collect_all_db_entities(sess: Session) -> List[ReplaceableEntity]:
    """Collect all entities from the database: functions, triggers, views,
    and materialized views, in that order."""
    collected: List[ReplaceableEntity] = []
    for entity_class in (PGFunction, PGTrigger, PGView, PGMaterializedView):
        collected.extend(entity_class.from_database(sess, "%"))
    return collected
def downgrade():
    """Alembic downgrade: restore the previous definitions of
    ``basic.poi_aoi_visualization`` and
    ``basic.prepare_heatmap_local_accessibility``."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Previous POI/AOI visualization: resolves the user's POI and AOI
    # categories, applies scenario exclusions, then emits successive
    # RETURN QUERY batches from basic.poi, customer.poi_user, basic.aoi and
    # (when a scenario is active) customer.poi_modified.
    basic_poi_aoi_visualization = PGFunction(
        schema="basic",
        signature="poi_aoi_visualization(user_id_input integer, scenario_id_input integer, active_upload_ids integer[], active_study_area_id integer)",
        definition="returns TABLE(id integer, uid text, category text, name text, opening_hours text, street text, housenumber text, zipcode text, edit_type text, geom geometry)\n LANGUAGE plpgsql\nAS $function$\nDECLARE \t\n\taoi_categories TEXT[]; \n\tpoi_categories jsonb = basic.poi_categories(user_id_input);\n\tdata_upload_poi_categories TEXT[] = '{}'::TEXT[];\n\tcombined_poi_categories text[];\n\texcluded_pois_id text[] := ARRAY[]::text[]; \n\tbuffer_geom_study_area geometry; \nBEGIN\n\tdata_upload_poi_categories = basic.poi_categories_data_uploads(user_id_input);\n\n\t/*Get combined poi categories*/\n\tSELECT ARRAY_AGG(c.category)\n\tINTO combined_poi_categories\n\tFROM \t\n\t(\n\t\tSELECT jsonb_array_elements_text(poi_categories -> 'true') category\n\t\tUNION ALL \n\t\tSELECT jsonb_array_elements_text(poi_categories -> 'false') category\n\t) c;\n\t\n\tRAISE NOTICE '%', excluded_pois_id; \n\tRAISE NOTICE '%', active_upload_ids ; \n\tRAISE NOTICE '%', combined_poi_categories ; \n\n\n\t/*Prepare AOI categories*/\n\tDROP TABLE IF EXISTS aoi_groups_default; \n\tCREATE TEMP TABLE aoi_groups_default AS \n\tWITH aoi_groups AS \n\t(\n\t\tSELECT jsonb_array_elements(basic.select_customization('aoi_groups')) aoi_group\n\t)\n\tSELECT jsonb_array_elements(p.aoi_group -> jsonb_object_keys(p.aoi_group) -> 'children') AS aoi_category \n\tFROM aoi_groups p;\n\n\tSELECT ARRAY_AGG(object_keys) AS aoi_category\n\tINTO aoi_categories\n\tFROM aoi_groups_default p, LATERAL jsonb_object_keys(p.aoi_category) object_keys; \n\t\n\t/*Check if POI scenario*/\n\tIF scenario_id_input <> 0 THEN \n\t\texcluded_pois_id = basic.modified_pois(scenario_id_input);\n\tEND IF; \n\t/*Buffer study area to avoid border effects*/\n\tbuffer_geom_study_area = (SELECT buffer_geom_heatmap AS geom FROM basic.study_area s WHERE s.id = active_study_area_id);\n\n RETURN query\n \tSELECT p.id, p.uid, p.category, p.name, p.opening_hours, p.street, p.housenumber, p.zipcode, NULL AS edit_type, p.geom \n\tFROM basic.poi p\n\tWHERE p.category IN (SELECT UNNEST(combined_poi_categories))\n\tAND p.uid NOT IN (SELECT UNNEST(excluded_pois_id))\n\tAND p.geom && buffer_geom_study_area\n\tAND p.category NOT IN (SELECT UNNEST(data_upload_poi_categories));\n\t\n\tRETURN query \n\tSELECT p.id, p.uid, p.category, p.name, p.opening_hours, p.street, p.housenumber, p.zipcode, NULL AS edit_type, p.geom \n\tFROM customer.poi_user p\n\tWHERE p.category IN (SELECT UNNEST(combined_poi_categories))\n\tAND p.data_upload_id IN (SELECT UNNEST(active_upload_ids))\n\tAND p.uid NOT IN (SELECT UNNEST(excluded_pois_id))\n\tAND p.geom && buffer_geom_study_area;\n\t\n\tRETURN query \n\t/*No scenarios nor aoi_user is implemented at the moment*/\n\tSELECT p.id, NULL, p.category, p.name, p.opening_hours, NULL AS street, NULL AS housenumber, NULL AS zipcode, NULL AS edit_type, p.geom\n\tFROM basic.aoi p \n\tWHERE p.category IN (SELECT UNNEST(aoi_categories))\n\tAND p.geom && buffer_geom_study_area; \n\t\n\tIF scenario_id_input <> 0 THEN \n\t \tRETURN query \n\t \tSELECT p.id, p.uid, p.category, p.name, p.opening_hours, p.street, p.housenumber, p.zipcode, p.edit_type, p.geom \n\t\tFROM customer.poi_modified p\n\t\tWHERE p.category IN (SELECT UNNEST(combined_poi_categories))\n\t\tAND p.geom && buffer_geom_study_area\n\t\tAND p.scenario_id = scenario_id_input; \n\t \t\n\t\tRETURN query\n\t \tSELECT p.id, p.uid, p.category, p.name, p.opening_hours, p.street, p.housenumber, p.zipcode, 'd' AS edit_type, p.geom \n\t\tFROM basic.poi p\n\t\tWHERE p.category IN (SELECT UNNEST(combined_poi_categories))\n\t\tAND p.uid IN (SELECT UNNEST(excluded_pois_id))\n\t\tAND p.geom && buffer_geom_study_area\n\t\tAND p.category NOT IN (SELECT UNNEST(data_upload_poi_categories));\n\t\n\t\tRETURN query \n\t\tSELECT p.id, p.uid, p.category, p.name, p.opening_hours, p.street, p.housenumber, p.zipcode, 'd' AS edit_type, p.geom \n\t\tFROM customer.poi_user p\n\t\tWHERE p.category IN (SELECT UNNEST(combined_poi_categories))\n\t\tAND p.data_upload_id IN (SELECT UNNEST(active_upload_ids))\n\t\tAND p.uid IN (SELECT UNNEST(excluded_pois_id))\n\t\tAND p.geom && buffer_geom_study_area;\n\tEND IF; \nEND ;\n$function$"
    )
    op.replace_entity(basic_poi_aoi_visualization)
    # Previous heatmap-accessibility preparation: translates per-amenity
    # sensitivities, then unions reached-POI accessibility indices for
    # single-entrance POIs, multi-entrance POIs (grouped/max), optional data
    # uploads, and scenario-modified POIs.
    basic_prepare_heatmap_local_accessibility = PGFunction(
        schema="basic",
        signature="prepare_heatmap_local_accessibility(amenities_json jsonb, user_id_input integer, active_study_area_id integer, modus_input text DEFAULT 'default'::text, scenario_id_input integer DEFAULT 0, data_upload_ids integer[] DEFAULT '{}'::integer[])",
        definition="returns TABLE(grid_visualization_id bigint, accessibility_index bigint)\n LANGUAGE plpgsql\nAS $function$\nDECLARE\n\tarray_amenities text[];\n\tpois_one_entrance jsonb := basic.poi_categories(user_id_input) -> 'false';\n\tpois_more_entrances jsonb := basic.poi_categories(user_id_input) -> 'true';\n\tsensitivities integer[]; \n\ttranslation_sensitivities jsonb;\n\texcluded_poi_uids text[] := '{}'::TEXT[];\nBEGIN\n \t\n\tSELECT ARRAY_AGG(s.sensitivity::integer) \n\tINTO sensitivities \n\tFROM (\n\t\tSELECT jsonb_array_elements_text(basic.select_customization('heatmap_sensitivities')) sensitivity \n\t) s;\n\n\tSELECT jsonb_agg(a.category)\n\tINTO pois_one_entrance \n\tFROM jsonb_object_keys(amenities_json) AS a(category), jsonb_array_elements_text(pois_one_entrance) AS o(category) \n\tWHERE a.category = o.category; \n\t\n\tSELECT jsonb_agg(a.category)\n\tINTO pois_more_entrances \n\tFROM jsonb_object_keys(amenities_json) AS a(category), jsonb_array_elements_text(pois_more_entrances) AS o(category) \n\tWHERE a.category = o.category; \n\n\tSELECT jsonb_object_agg(k, (sensitivities # (v ->> 'sensitivity')::integer)::smallint)\n\tINTO translation_sensitivities\n\tFROM jsonb_each(amenities_json) AS u(k, v);\n\n\tIF modus_input <> 'default' AND scenario_id_input <> 0 THEN \n\t\texcluded_poi_uids = basic.modified_pois(scenario_id_input); \n\tEND IF; \n\n\tRETURN query\n\tSELECT u.grid_visualization_id, (u.accessibility_index * (amenities_json -> x.category ->> 'weight')::SMALLINT)::bigint AS accessibility_index \n\tFROM (\n\t\tSELECT grid_visualization_ids, accessibility_indices[(translation_sensitivities ->> p.category)::integer\\:(translation_sensitivities ->> p.category)::integer][1\\:], p.category\n\t\tFROM customer.reached_poi_heatmap r, \n\t\t(\n\t\t\tSELECT p.uid, p.category \n\t\t\tFROM basic.study_area s, basic.poi p \n\t\t\tWHERE ST_Intersects(p.geom, s.buffer_geom_heatmap) \n\t\t\tAND s.id = active_study_area_id\n\t\t\tAND p.category IN (SELECT jsonb_array_elements_text(pois_one_entrance))\n\t\t\tAND p.uid NOT IN (SELECT UNNEST(excluded_poi_uids))\n\t\t) p \n\t\tWHERE p.uid = r.poi_uid \n\t\tAND r.scenario_id IS NULL \n\t)x, UNNEST(x.grid_visualization_ids, x.accessibility_indices) AS u(grid_visualization_id, accessibility_index);\n\t\n\tRETURN query \n\tSELECT u.grid_visualization_id, (max(u.accessibility_index) * (amenities_json -> x.category ->> 'weight')::SMALLINT)::bigint AS accessibility_index \n\tFROM (\n\t\tSELECT grid_visualization_ids, accessibility_indices[(translation_sensitivities ->> p.category)::integer\\:(translation_sensitivities ->> p.category)::integer][1\\:], \n\t\tp.category, p.name\n\t\tFROM customer.reached_poi_heatmap r, \n\t\t(\n\t\t\tSELECT p.uid, p.category, p.name \n\t\t\tFROM basic.study_area s, basic.poi p \n\t\t\tWHERE ST_Intersects(p.geom, s.buffer_geom_heatmap) \n\t\t\tAND s.id = active_study_area_id\n\t\t\tAND p.uid NOT IN (SELECT UNNEST(excluded_poi_uids))\n\t\t\tAND p.category IN (SELECT jsonb_array_elements_text(pois_more_entrances))\n\t\t) p \n\t\tWHERE p.uid = r.poi_uid \n\t\tAND r.scenario_id IS NULL \n\t)x, UNNEST(x.grid_visualization_ids, x.accessibility_indices) AS u(grid_visualization_id, accessibility_index)\n\tGROUP BY u.grid_visualization_id, x.category, x.name; \n\t\n\tIF data_upload_ids <> '{}'::integer[] THEN \n\t\tRETURN query\n\t\tSELECT u.grid_visualization_id, (u.accessibility_index * (amenities_json -> x.category ->> 'weight')::SMALLINT)::bigint AS accessibility_index \n\t\tFROM (\n\t\t\tSELECT grid_visualization_ids, accessibility_indices[(translation_sensitivities ->> p.category)::integer\\:(translation_sensitivities ->> p.category)::integer][1\\:], p.category\n\t\t\tFROM customer.reached_poi_heatmap r, \n\t\t\t(\n\t\t\t\tSELECT p.uid, p.category \n\t\t\t\tFROM basic.study_area s, customer.poi_user p \n\t\t\t\tWHERE ST_Intersects(p.geom, s.buffer_geom_heatmap) \n\t\t\t\tAND s.id = active_study_area_id\n\t\t\t\tAND p.data_upload_id IN (SELECT UNNEST(data_upload_ids))\n\t\t\t\tAND p.category IN (SELECT jsonb_array_elements_text(pois_one_entrance))\n\t\t\t\tAND p.uid NOT IN (SELECT UNNEST(excluded_poi_uids))\n\t\t\t) p \n\t\t\tWHERE p.uid = r.poi_uid \n\t\t\tAND r.scenario_id IS NULL \n\t\t)x, UNNEST(x.grid_visualization_ids, x.accessibility_indices) AS u(grid_visualization_id, accessibility_index);\n\t\n\t\tRETURN query \n\t\tSELECT u.grid_visualization_id, (max(u.accessibility_index) * (amenities_json -> x.category ->> 'weight')::SMALLINT)::bigint AS accessibility_index \n\t\tFROM (\n\t\t\tSELECT grid_visualization_ids, accessibility_indices[(translation_sensitivities ->> p.category)::integer\\:(translation_sensitivities ->> p.category)::integer][1\\:], \n\t\t\tp.category, p.name\n\t\t\tFROM customer.reached_poi_heatmap r, \n\t\t\t(\n\t\t\t\tSELECT p.uid, p.category, p.name \n\t\t\t\tFROM basic.study_area s, customer.poi_user p \n\t\t\t\tWHERE ST_Intersects(p.geom, s.buffer_geom_heatmap) \n\t\t\t\tAND s.id = active_study_area_id\n\t\t\t\tAND p.uid NOT IN (SELECT UNNEST(excluded_poi_uids))\n\t\t\t\tAND p.category IN (SELECT jsonb_array_elements_text(pois_more_entrances))\n\t\t\t\tAND p.data_upload_id IN (SELECT UNNEST(data_upload_ids))\n\t\t\t) p \n\t\t\tWHERE p.uid = r.poi_uid \t\n\t\t\tAND r.scenario_id IS NULL \n\t\t)x, UNNEST(x.grid_visualization_ids, x.accessibility_indices) AS u(grid_visualization_id, accessibility_index)\n\t\tGROUP BY u.grid_visualization_id, x.category, x.name; \t\n\tEND IF;\n\t\n\tIF modus_input <> 'default' AND scenario_id_input <> 0 THEN \n\t\tRETURN query \n\t\tSELECT u.grid_visualization_id, (u.accessibility_index * (amenities_json -> x.category ->> 'weight')::SMALLINT)::bigint AS accessibility_index \n\t\tFROM (\n\t\t\tSELECT grid_visualization_ids, accessibility_indices[(translation_sensitivities ->> p.category)::integer\\:(translation_sensitivities ->> p.category)::integer][1\\:], p.category\n\t\t\tFROM customer.reached_poi_heatmap r, \n\t\t\t(\n\t\t\t\tSELECT p.uid, p.category, p.scenario_id \n\t\t\t\tFROM basic.study_area s, customer.poi_modified p \n\t\t\t\tWHERE ST_Intersects(p.geom, s.buffer_geom_heatmap) \n\t\t\t\tAND s.id = active_study_area_id\n\t\t\t\tAND p.edit_type <> 'd'\n\t\t\t\tAND p.scenario_id = scenario_id_input \n\t\t\t) p \n\t\t\tWHERE p.uid = r.poi_uid \n\t\t\tAND r.scenario_id = scenario_id_input \n\t\t\tAND p.category IN (SELECT jsonb_array_elements_text(pois_one_entrance))\n\t\t)x, UNNEST(x.grid_visualization_ids, x.accessibility_indices) AS u(grid_visualization_id, accessibility_index);\n\t\t\n\t\tRETURN query \n\t\tSELECT u.grid_visualization_id, (max(u.accessibility_index) * (amenities_json -> x.category ->> 'weight')::SMALLINT)::bigint AS accessibility_index \n\t\tFROM (\n\t\t\tSELECT grid_visualization_ids, accessibility_indices[(translation_sensitivities ->> p.category)::integer\\:(translation_sensitivities ->> p.category)::integer][1\\:], \n\t\t\tp.category, p.name\n\t\t\tFROM customer.reached_poi_heatmap r, \n\t\t\t(\n\t\t\t\tSELECT p.uid, p.category, p.name, p.scenario_id FROM basic.study_area s, customer.poi_modified p \n\t\t\t\tWHERE ST_Intersects(p.geom, s.buffer_geom_heatmap) \n\t\t\t\tAND s.id = active_study_area_id\n\t\t\t\tAND p.edit_type <> 'd'\n\t\t\t\tAND p.scenario_id = scenario_id_input \n\t\t\t) p \n\t\t\tWHERE p.uid = r.poi_uid \n\t\t\tAND r.scenario_id = scenario_id_input \n\t\t\tAND p.category IN (SELECT jsonb_array_elements_text(pois_more_entrances))\t\n\t\t)x, UNNEST(x.grid_visualization_ids, x.accessibility_indices) AS u(grid_visualization_id, accessibility_index)\n\t\tGROUP BY u.grid_visualization_id, x.category, x.name; \n\tEND IF; \n\t\nEND;\n$function$"
    )
    op.replace_entity(basic_prepare_heatmap_local_accessibility)
def downgrade():
    """Alembic downgrade: restore the earlier ``basic.poi_aoi_visualization``
    and ``basic.poi_categories_data_uploads`` definitions and drop the layer
    resolution columns added by upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Earlier visualization variant: data_upload_poi_categories is initialized
    # directly in DECLARE and the RAISE NOTICE debug lines are absent.
    basic_poi_aoi_visualization = PGFunction(
        schema="basic",
        signature="poi_aoi_visualization(user_id_input integer, scenario_id_input integer, active_upload_ids integer[], active_study_area_id integer)",
        definition="returns TABLE(id integer, uid text, category text, name text, opening_hours text, street text, housenumber text, zipcode text, edit_type text, geom geometry)\n LANGUAGE plpgsql\nAS $function$\nDECLARE \t\n\taoi_categories TEXT[]; \n\tpoi_categories jsonb = basic.poi_categories(user_id_input);\n\tdata_upload_poi_categories TEXT[] = basic.poi_categories_data_uploads(user_id_input);\n\tcombined_poi_categories text[];\n\texcluded_pois_id text[] := ARRAY[]::text[]; \n\tbuffer_geom_study_area geometry; \nBEGIN\n\t/*Get combined poi categories*/\n\tSELECT ARRAY_AGG(c.category)\n\tINTO combined_poi_categories\n\tFROM \t\n\t(\n\t\tSELECT jsonb_array_elements_text(poi_categories -> 'true') category\n\t\tUNION ALL \n\t\tSELECT jsonb_array_elements_text(poi_categories -> 'false') category\n\t) c;\n\n\t/*Prepare AOI categories*/\n\tDROP TABLE IF EXISTS aoi_groups_default; \n\tCREATE TEMP TABLE aoi_groups_default AS \n\tWITH aoi_groups AS \n\t(\n\t\tSELECT jsonb_array_elements(basic.select_customization('aoi_groups')) aoi_group\n\t)\n\tSELECT jsonb_array_elements(p.aoi_group -> jsonb_object_keys(p.aoi_group) -> 'children') AS aoi_category \n\tFROM aoi_groups p;\n\n\tSELECT ARRAY_AGG(object_keys) AS aoi_category\n\tINTO aoi_categories\n\tFROM aoi_groups_default p, LATERAL jsonb_object_keys(p.aoi_category) object_keys; \n\t\n\t/*Check if POI scenario*/\n\tIF scenario_id_input <> 0 THEN \n\t\texcluded_pois_id = basic.modified_pois(scenario_id_input);\n\tEND IF; \n\t/*Buffer study area to avoid border effects*/\n\tbuffer_geom_study_area = (SELECT buffer_geom_heatmap AS geom FROM basic.study_area s WHERE s.id = active_study_area_id);\n\n RETURN query\n \tSELECT p.id, p.uid, p.category, p.name, p.opening_hours, p.street, p.housenumber, p.zipcode, NULL AS edit_type, p.geom \n\tFROM basic.poi p\n\tWHERE p.category IN (SELECT UNNEST(combined_poi_categories))\n\tAND p.uid NOT IN (SELECT UNNEST(excluded_pois_id))\n\tAND p.geom && buffer_geom_study_area\n\tAND p.category NOT IN (SELECT UNNEST(data_upload_poi_categories));\n\t\n\tRETURN query \n\tSELECT p.id, p.uid, p.category, p.name, p.opening_hours, p.street, p.housenumber, p.zipcode, NULL AS edit_type, p.geom \n\tFROM customer.poi_user p\n\tWHERE p.category IN (SELECT UNNEST(combined_poi_categories))\n\tAND p.data_upload_id IN (SELECT UNNEST(active_upload_ids))\n\tAND p.uid NOT IN (SELECT UNNEST(excluded_pois_id))\n\tAND p.geom && buffer_geom_study_area;\n\t\n\tRETURN query \n\t/*No scenarios nor aoi_user is implemented at the moment*/\n\tSELECT p.id, NULL, p.category, p.name, p.opening_hours, NULL AS street, NULL AS housenumber, NULL AS zipcode, NULL AS edit_type, p.geom\n\tFROM basic.aoi p \n\tWHERE p.category IN (SELECT UNNEST(aoi_categories))\n\tAND p.geom && buffer_geom_study_area; \n\t\n\tIF scenario_id_input <> 0 THEN \n\t \tRETURN query \n\t \tSELECT p.id, p.uid, p.category, p.name, p.opening_hours, p.street, p.housenumber, p.zipcode, p.edit_type, p.geom \n\t\tFROM customer.poi_modified p\n\t\tWHERE p.category IN (SELECT UNNEST(combined_poi_categories))\n\t\tAND p.geom && buffer_geom_study_area\n\t\tAND p.scenario_id = scenario_id_input; \n\t \t\n\t\tRETURN query\n\t \tSELECT p.id, p.uid, p.category, p.name, p.opening_hours, p.street, p.housenumber, p.zipcode, 'd' AS edit_type, p.geom \n\t\tFROM basic.poi p\n\t\tWHERE p.category IN (SELECT UNNEST(combined_poi_categories))\n\t\tAND p.uid IN (SELECT UNNEST(excluded_pois_id))\n\t\tAND p.geom && buffer_geom_study_area\n\t\tAND p.category NOT IN (SELECT UNNEST(data_upload_poi_categories));\n\t\n\t\tRETURN query \n\t\tSELECT p.id, p.uid, p.category, p.name, p.opening_hours, p.street, p.housenumber, p.zipcode, 'd' AS edit_type, p.geom \n\t\tFROM customer.poi_user p\n\t\tWHERE p.category IN (SELECT UNNEST(combined_poi_categories))\n\t\tAND p.data_upload_id IN (SELECT UNNEST(active_upload_ids))\n\t\tAND p.uid IN (SELECT UNNEST(excluded_pois_id))\n\t\tAND p.geom && buffer_geom_study_area;\n\tEND IF; \nEND ;\n$function$"
    )
    op.replace_entity(basic_poi_aoi_visualization)
    # Earlier per-upload category resolver.  NOTE(review): the restored SQL
    # filters on a hard-coded "u.id = 4" instead of user_id_input — this
    # matches the historical definition being restored, but it looks like a
    # latent bug in that version; confirm before relying on it.
    basic_poi_categories_data_uploads = PGFunction(
        schema="basic",
        signature="poi_categories_data_uploads(user_id_input integer)",
        definition="returns text[]\n LANGUAGE plpgsql\nAS $function$\nDECLARE \n\tupload_ids integer[];\n\tarea_id integer;\n\tupload_id integer; \n\tpoi_user_category TEXT;\n\tcategories text[] := '{}'::TEXT[];\nBEGIN \n\t\n\t\n\tSELECT u.active_data_upload_ids, u.active_study_area_id \n\tINTO upload_ids, area_id \n\tFROM customer.USER u \n\tWHERE u.id = 4;\n\n\tFOREACH upload_id IN ARRAY upload_ids \n\tLOOP\n\t\tSELECT category \n\t\tINTO poi_user_category \n\t\tFROM customer.poi_user p, customer.data_upload d\n\t\tWHERE p.data_upload_id = upload_id \n\t\tAND p.data_upload_id = d.id \n\t\tAND d.study_area_id = area_id \n\t\tLIMIT 1; \n\t\t\n\t\tcategories = array_append(categories, poi_user_category );\n\t\t\t\nEND LOOP;\n\t\nRETURN categories; \n\nEND ;\n$function$"
    )
    op.replace_entity(basic_poi_categories_data_uploads)
    # Remove the resolution bounds added by upgrade().
    op.drop_column('layer_library', 'min_resolution', schema='customer')
    op.drop_column('layer_library', 'max_resolution', schema='customer')
def to_upper():
    """Build the PGFunction entity for ``public.to_upper(some_text text)``."""
    function_body = """ returns text as $$ select upper(some_text) || 'abc' $$ language SQL; """
    return PGFunction(
        schema="public",
        signature="to_upper(some_text text)",
        definition=function_body,
    )
def test_pg_function_from_sql_file_valid() -> None:
    """PGFunction.from_sql parses a schema-qualified CREATE FUNCTION statement."""
    create_statement = """ CREATE OR REPLACE FUNCTION public.to_upper(some_text text) RETURNS TEXT AS $$ SELECT upper(some_text) $$ language SQL; """
    parsed = PGFunction.from_sql(create_statement)
    # The schema prefix must be picked up from the qualified name.
    assert parsed.schema == "public"
def test_plpgsql_colon_esacpe(engine) -> None:
    """PGFunction must not escape ':=' in plpgsql definitions.

    PGFunction.__init__ overrides colon escapes for plpgsql because ':='
    should not be escaped for sqlalchemy.text; if it were escaped, an
    exception would be raised.  (Name keeps its historical spelling so test
    selection by name is unaffected.)
    """
    plpgsql_func = PGFunction(
        schema="public",
        signature="some_func(some_text text)",
        definition=""" returns text as $$ declare copy_o_text text; begin copy_o_text := some_text; return copy_o_text; end; $$ language plpgsql """,
    )
    register_entities([plpgsql_func], entity_types=[PGFunction])

    # Autogenerate the initial migration for the registered function.
    run_alembic_command(
        engine=engine,
        command="revision",
        command_kwargs={"autogenerate": True, "rev_id": "1", "message": "create"},
    )

    with (TEST_VERSIONS_ROOT / "1_create.py").open() as migration_file:
        contents = migration_file.read()

    assert "op.create_entity" in contents
    assert "op.drop_entity" in contents
    assert "op.replace_entity" not in contents
    assert "from alembic_utils.pg_function import PGFunction" in contents

    # Round-trip the migration: upgrade to head, then back down to base.
    run_alembic_command(engine=engine, command="upgrade", command_kwargs={"revision": "head"})
    run_alembic_command(engine=engine, command="downgrade", command_kwargs={"revision": "base"})
def downgrade():
    """Alembic downgrade: drop drafts.user_id, restore the previous
    ``match_individual_name`` implementation, and drop the
    previous_financing_statements table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('drafts', 'user_id')
    # Previous matcher: word_similarity >= .40 on the last-name key combined
    # with first-name / middle-initial similarity and nickname lookups.
    public_match_individual_name = PGFunction(
        schema="public",
        signature="match_individual_name(lastname character varying, firstname character varying)",
        definition="returns integer[]\n LANGUAGE plpgsql\nAS $function$\n DECLARE\n v_ids integer ARRAY;\n BEGIN\n -- SET pg_trgm.word_similarity_threshold = 0.8;\n -- SET pg_trgm.similarity_threshold = 0.8;\n \n SELECT array_agg(p.id)\n INTO v_ids\n FROM parties p\n WHERE p.registration_id_end IS NULL\n AND p.party_type = 'DI'\n AND lastname <% p.last_name_key\n AND word_similarity(lastname, p.last_name_key) >= .40\n AND ((firstname <% p.first_name_key AND word_similarity(firstname, p.first_name_key) >= .50) OR\n (firstname <% p.middle_initial AND word_similarity(firstname, p.middle_initial) >= .50) OR\n p.first_name_key IN (SELECT n.name \n FROM nicknames n \n WHERE n.name_id IN (SELECT n2.name_id\n FROM nicknames n2\n WHERE n2.name = firstname))); \n RETURN v_ids;\n END\n ; \n $function$"
    )
    op.replace_entity(public_match_individual_name)
    op.drop_table('previous_financing_statements')
def test_update_revision(engine) -> None:
    """A changed function definition autogenerates replace_entity (only)."""
    engine.execute(TO_FLOAT_FROM_INT.to_sql_statement_create())
    engine.execute(TO_FLOAT_FROM_TEXT.to_sql_statement_create())

    # Same signature as TO_FLOAT_FROM_INT, different body.
    updated_to_float = PGFunction(
        schema="public",
        signature="to_float(x integer)",
        definition=""" returns float as $$ select x::text::float $$ language SQL; """,
    )
    register_entities([updated_to_float, TO_FLOAT_FROM_TEXT], entity_types=[PGFunction])

    # Autogenerate a new migration; it should detect the change we made and
    # produce a "replace_function" statement.
    run_alembic_command(
        engine=engine,
        command="revision",
        command_kwargs={"autogenerate": True, "rev_id": "2", "message": "replace"},
    )

    with (TEST_VERSIONS_ROOT / "2_replace.py").open() as migration_file:
        contents = migration_file.read()

    # One replace_entity for upgrade and one for downgrade, nothing else.
    assert contents.count("op.replace_entity") == 2
    assert "op.create_entity" not in contents
    assert "op.drop_entity" not in contents
    assert "from alembic_utils.pg_function import PGFunction" in contents

    # Round-trip the migration.
    run_alembic_command(engine=engine, command="upgrade", command_kwargs={"revision": "head"})
    run_alembic_command(engine=engine, command="downgrade", command_kwargs={"revision": "base"})
def test_update_revision(engine) -> None:
    """Updating TO_UPPER's definition autogenerates a replace_entity migration."""
    engine.execute(TO_UPPER.to_sql_statement_create())

    # Same schema and signature as TO_UPPER, updated body (raw string: the
    # definition deliberately contains quotes and backslashes).
    updated_to_upper = PGFunction(
        TO_UPPER.schema,
        TO_UPPER.signature,
        r'''returns text as $$ select upper(some_text) || 'def' -- """ \n \\ $$ language SQL immutable strict;''',
    )
    register_entities([updated_to_upper])

    # Autogenerate a new migration; it should detect the change we made and
    # produce a "replace_function" statement.
    run_alembic_command(
        engine=engine,
        command="revision",
        command_kwargs={"autogenerate": True, "rev_id": "2", "message": "replace"},
    )

    with (TEST_VERSIONS_ROOT / "2_replace.py").open() as migration_file:
        contents = migration_file.read()

    assert "op.replace_entity" in contents
    assert "op.create_entity" not in contents
    assert "op.drop_entity" not in contents
    assert "from alembic_utils.pg_function import PGFunction" in contents

    # Round-trip the migration.
    run_alembic_command(engine=engine, command="upgrade", command_kwargs={"revision": "head"})
    run_alembic_command(engine=engine, command="downgrade", command_kwargs={"revision": "base"})
def test_has_no_parameters(engine) -> None:
    """A zero-argument function must generate a valid drop statement.

    An error used to occur in the drop statement when the function had no
    parameters, related to parameter parsing for dropping default statements.
    """
    zero_arg_func = PGFunction(
        schema="public",
        signature="side_effect()",
        definition=""" returns integer as $$ select 1; $$ language SQL; """,
    )
    register_entities([zero_arg_func], schemas=["public"], entity_types=[PGFunction])

    run_alembic_command(
        engine=engine,
        command="revision",
        command_kwargs={"autogenerate": True, "rev_id": "1", "message": "no_arguments"},
    )

    with (TEST_VERSIONS_ROOT / "1_no_arguments.py").open() as migration_file:
        contents = migration_file.read()

    assert "op.drop_entity" in contents

    # Round-trip the migration.
    run_alembic_command(engine=engine, command="upgrade", command_kwargs={"revision": "head"})
    run_alembic_command(engine=engine, command="downgrade", command_kwargs={"revision": "base"})
def downgrade():
    """Revert the migration: drop the citation/opinion tables and indexes,
    then remove the pg_trgm extension and the searchable-case-name trigger
    machinery created by the matching upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index("idx_40750_similarity_opinion_b_id", table_name="similarity")
    op.drop_index("idx_40750_similarity_opinion_a_id", table_name="similarity")
    op.drop_table("similarity")
    op.drop_index("opinionparenthetical_citing_opinion_id", table_name="opinionparenthetical")
    op.drop_index("opinionparenthetical_cited_opinion_id", table_name="opinionparenthetical")
    op.drop_table("opinionparenthetical")
    op.drop_index("idx_40705_opinion_cluster_id", table_name="opinion")
    op.drop_table("opinion")
    op.drop_index("idx_40753_clustercitation_citing_cluster_id", table_name="clustercitation")
    op.drop_index("idx_40753_clustercitation_cited_cluster_id", table_name="clustercitation")
    op.drop_table("clustercitation")
    op.drop_index("searchable_case_name_idx", table_name="cluster")
    op.drop_table("cluster")
    op.drop_index("idx_40711_citation_citing_opinion_id", table_name="citation")
    op.drop_index("idx_40711_citation_cited_opinion_id", table_name="citation")
    op.drop_table("citation")
    # ### end Alembic commands ###

    # Drop the extension, then the trigger that references the function,
    # then the trigger function itself.
    public_pg_trgm = PGExtension(schema="public", signature="pg_trgm")
    op.drop_entity(public_pg_trgm)

    public_cluster_update_searchable_case_name = PGTrigger(
        schema="public",
        signature="update_searchable_case_name",
        on_entity="public.cluster",
        is_constraint=False,
        definition="BEFORE INSERT OR UPDATE ON public.cluster\n FOR EACH ROW EXECUTE PROCEDURE public.update_searchable_case_name_trigger()",
    )
    op.drop_entity(public_cluster_update_searchable_case_name)

    public_update_searchable_case_name_trigger = PGFunction(
        schema="public",
        signature="update_searchable_case_name_trigger()",
        definition="RETURNS trigger\n LANGUAGE plpgsql\n AS $$\n begin\n new.searchable_case_name := \n to_tsvector('pg_catalog.english', new.case_name || ' ' || coalesce(new.reporter, '') || ' ' || new.year);\n return new;\n end\n $$",
    )
    op.drop_entity(public_update_searchable_case_name_trigger)
def downgrade():
    """Revert the migration: restore the previous searchkey_aircraft DB
    function and drop the event-tracking tables, indexes, sequence, and the
    search_results columns added by the matching upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Reinstate the pre-upgrade version of the aircraft search-key function.
    public_searchkey_aircraft = PGFunction(
        schema="public",
        signature="searchkey_aircraft(aircraft_number character varying)",
        definition="returns character varying\n LANGUAGE plpgsql\nAS $function$\n DECLARE\n v_search_key VARCHAR(25);\n BEGIN\n v_search_key := TRIM(REGEXP_REPLACE(aircraft_number,'\\s|-','','gi'));\n IF (LENGTH(v_search_key) > 6) THEN\n v_search_key := RIGHT(v_search_key, 6);\n END IF;\n RETURN v_search_key;\n END\n ; \n $function$"
    )
    op.replace_entity(public_searchkey_aircraft)
    op.drop_column('search_results', 'account_name')
    op.drop_column('search_results', 'doc_storage_url')
    op.drop_column('search_results', 'callback_url')
    # Indexes must go before their table; event_tracking references
    # event_tracking_types so it is dropped first.
    op.drop_index(op.f('ix_event_tracking_key_id'), table_name='event_tracking')
    op.drop_index(op.f('ix_event_tracking_event_ts'), table_name='event_tracking')
    op.drop_index(op.f('ix_event_tracking_event_tracking_type'), table_name='event_tracking')
    op.drop_table('event_tracking')
    op.drop_table('event_tracking_types')
    # Added manually
    op.execute(DropSequence(Sequence('event_tracking_id_seq')))
searchkey_name_match = PGFunction( schema="public", signature="searchkey_name_match(search_key IN VARCHAR, name1 IN VARCHAR, name2 IN VARCHAR, name3 IN varchar)", definition=""" RETURNS int LANGUAGE plpgsql AS $$ -- Cartesion cross-product on name: search key may have up to 3 names, an exact match on any name is a hit. -- search_key is party.first_name_key to match on: may be 3 names separated by a space character. -- name1, name2, name3 are names already parsed from the search criteria: name2 and name3 may be null. DECLARE v_name1 VARCHAR(50); v_name2 VARCHAR(50); v_name3 VARCHAR(50); v_match_count integer; BEGIN v_match_count := 0; v_name1 = split_part(search_key, ' ', 1); v_name2 = split_part(search_key, ' ', 2); -- May be null v_name3 = split_part(search_key, ' ', 3); -- May be null IF (v_name1 = name1 OR (name2 IS NOT NULL AND v_name1 = name2) OR (name3 IS NOT NULL AND v_name1 = name3)) THEN v_match_count := 1; ELSIF (v_name2 IS NOT NULL AND v_name2 = name1 OR (name2 IS NOT NULL AND v_name2 = name2) OR (name3 IS NOT NULL AND v_name2 = name3)) THEN v_match_count := 1; ELSIF (v_name3 IS NOT NULL AND v_name3 = name1 OR (name2 IS NOT NULL AND v_name3 = name2) OR (name3 IS NOT NULL AND v_name3 = name3)) THEN v_match_count := 1; END IF; RETURN v_match_count; END ; $$; """ )
"""Maintain db function searchkey_mhr here.""" from alembic_utils.pg_function import PGFunction searchkey_mhr = PGFunction(schema="public", signature="searchkey_mhr(mhr_number IN VARCHAR)", definition=""" RETURNS VARCHAR LANGUAGE plpgsql AS $$ DECLARE v_search_key VARCHAR(6); BEGIN v_search_key := TRIM(REGEXP_REPLACE(mhr_number,'[^0-9A-Za-z]','','gi')); v_search_key := LPAD(REGEXP_REPLACE(v_search_key,'[$A-Za-z]','0'),6,'0'); RETURN v_search_key; END ; $$; """)
searchkey_vehicle = PGFunction( schema="public", signature="searchkey_vehicle(serial_number IN VARCHAR)", definition=""" RETURNS VARCHAR LANGUAGE plpgsql AS $$ DECLARE v_search_key VARCHAR(25); v_vin VARCHAR(25); v_digits VARCHAR(25); v_length_digits integer; v_last_digit_index integer; BEGIN v_search_key := UPPER(REGEXP_REPLACE(serial_number,'[^0-9A-Za-z]','','gi')); v_digits := regexp_replace(v_search_key, '[^0-9]', '', 'g'); v_length_digits := length(v_digits); v_vin := ''; IF (v_length_digits > 0) THEN v_last_digit_index := length(v_search_key) - length(regexp_replace(v_search_key, '.*' || RIGHT(v_digits, 1),'', 'g')); ELSE v_last_digit_index := 0; END IF; SELECT CASE WHEN v_length_digits = 0 THEN '000000' WHEN v_length_digits = 1 THEN LPAD(v_digits, 6, '0') WHEN RIGHT(v_search_key, 1) BETWEEN '0' AND '9' THEN LPAD(RIGHT(v_search_key, LEAST(LENGTH(v_search_key), 6)), 6, '0') WHEN RIGHT(v_search_key, 1) IN ('B','C','G','H','I','L','S','O','Y','Z') AND LEFT(RIGHT(v_search_key, 2), 1) NOT BETWEEN 'A' AND 'Z' THEN LPAD(RIGHT(v_search_key, 6), 6, '0') WHEN RIGHT(v_search_key, 1) NOT IN ('B','C','G','H','I','L','S','O','Y','Z') AND LEFT(RIGHT(v_search_key, 2), 1) BETWEEN '0' AND '9' AND LENGTH(v_search_key) > 6 THEN LEFT(RIGHT(v_search_key, 7), 6) WHEN RIGHT(v_search_key, 1) BETWEEN 'A' AND 'Z' AND LEFT(RIGHT(v_search_key, 2), 1) BETWEEN 'A' AND 'Z' AND LENGTH(v_search_key) < 6 THEN LPAD(REGEXP_REPLACE(v_search_key,'[$A-Za-z]','', 'g'), 6, '0') WHEN RIGHT(v_search_key, 1) BETWEEN 'A' AND 'Z' AND LEFT(RIGHT(v_search_key, 2), 1) NOT BETWEEN 'A' AND 'Z' AND v_last_digit_index BETWEEN 1 AND 5 THEN LPAD(REGEXP_REPLACE(v_search_key,'[$A-Za-z]','', 'g'), 6, '0') WHEN RIGHT(v_search_key, 1) BETWEEN 'A' AND 'Z' AND LEFT(RIGHT(v_search_key, 2), 1) BETWEEN 'A' AND 'Z' AND LENGTH(v_search_key) > 6 -- This does not appear to always work correctly. 
THEN LPAD(SUBSTR(v_search_key, (v_last_digit_index - 5), 5), 6, '0') WHEN RIGHT(v_search_key, 1) BETWEEN 'A' AND 'Z' AND LEFT(RIGHT(v_search_key, 2), 1) BETWEEN 'A' AND 'Z' AND LENGTH(v_search_key) > 6 -- This does not appear to always work correctly. THEN SUBSTR(v_search_key, (v_last_digit_index - 5), 6) END INTO v_vin; IF (LENGTH(v_vin) > 0) THEN v_search_key := v_vin; END IF; IF (LENGTH(v_search_key) > 6) THEN v_search_key := RIGHT(v_search_key, 6); END IF; v_search_key := REGEXP_REPLACE( REPLACE( REPLACE( REPLACE( REPLACE( REPLACE( REPLACE( REPLACE( REPLACE( REPLACE( REPLACE(v_search_key,'I','1'), 'L','1'), 'Z','2'), 'H','4'), 'Y','4'), 'S','5'), 'C','6'), 'G','6'), 'B','8'), 'O','0'), '[A-Za-z]','0'); RETURN v_search_key; END ; $$; """)
"""Maintain db function searchkey_first_name here.""" from alembic_utils.pg_function import PGFunction searchkey_first_name = PGFunction( schema="public", signature="searchkey_first_name(actual_name IN VARCHAR)", definition=""" RETURNS VARCHAR LANGUAGE plpgsql AS $$ DECLARE v_search_key VARCHAR(92); BEGIN -- Remove prefixes v_search_key := REGEXP_REPLACE(actual_name,'^DR |^DR.|^DR. |^MR |^MR.|^MR. |^MRS |^MRS.|^MRS. |^MS |^MS.|^MS. ','','gi'); -- Remove suffixes v_search_key := REGEXP_REPLACE(v_search_key,' JR$| JR.$| JR. $| SR$| SR $| SR.$| SR. $','','gi'); v_search_key := REGEXP_REPLACE(v_search_key,'[^0-9A-Za-z]',' ','gi'); -- Remove internal extra space characters v_search_key := TRIM(REGEXP_REPLACE(v_search_key,'( ){2,}',' ','g')); RETURN UPPER(v_search_key); END ; $$; """)
"""Maintain db function searchkey_aircraft here.""" from alembic_utils.pg_function import PGFunction searchkey_aircraft = PGFunction( schema="public", signature="searchkey_aircraft(aircraft_number IN VARCHAR)", definition=""" RETURNS VARCHAR LANGUAGE plpgsql AS $$ DECLARE v_search_key VARCHAR(25); BEGIN v_search_key := TRIM(REGEXP_REPLACE(aircraft_number,'\s|-','','gi')); IF (LENGTH(v_search_key) > 6) THEN v_search_key := RIGHT(v_search_key, 6); END IF; RETURN v_search_key; END ; $$; """)
from alembic_utils.pg_extension import PGExtension from alembic_utils.pg_function import PGFunction from alembic_utils.pg_trigger import PGTrigger # NOTE: When adding any new entities, remember to add it to the PG_ENTITY_LIST at the bottom of this file. pg_trgm_extension = PGExtension(schema="public", signature="pg_trgm") update_searchable_case_name_func = PGFunction( schema="public", signature="update_searchable_case_name_trigger()", definition=""" RETURNS trigger LANGUAGE plpgsql AS $$ begin new.searchable_case_name := to_tsvector('pg_catalog.english', new.case_name || ' ' || coalesce(new.reporter, '') || ' ' || new.year); return new; end $$; """, ) update_searchable_case_name_trigger = PGTrigger( schema="public", signature="update_searchable_case_name", on_entity="public.cluster", definition=""" BEFORE INSERT OR UPDATE ON public.cluster FOR EACH ROW EXECUTE PROCEDURE public.update_searchable_case_name_trigger()
"""Maintain db function get_draft_document_number here.""" from alembic_utils.pg_function import PGFunction get_draft_document_number = PGFunction(schema="public", signature="get_draft_document_number()", definition=""" RETURNS VARCHAR LANGUAGE plpgsql AS $$ BEGIN RETURN 'D' || trim(to_char(nextval('document_number_seq'), '0000000')); END ; $$; """)
conn = engine conn.execute(""" create table public.account ( id serial primary key, email text not null ); """) yield conn.execute("drop table public.account cascade") FUNC = PGFunction.from_sql( """create function public.downcase_email() returns trigger as $$ begin return new; end; $$ language plpgsql; """) TRIG = PGTrigger( schema="public", signature="lower_account_EMAIL", on_entity="public.account", definition=""" BEFORE INSERT ON public.account FOR EACH ROW EXECUTE PROCEDURE public.downcase_email() """, )
searchkey_nickname_match = PGFunction( schema="public", signature= "searchkey_nickname_match(search_key IN VARCHAR, name1 IN VARCHAR, name2 IN VARCHAR, name3 IN varchar)", definition=""" RETURNS int LANGUAGE plpgsql AS $$ -- Cartesion cross-product on nickname: search key may have up to 3 names, a nickname match on any name is a hit. -- search_key is party.first_name_key to match on: may be 3 names separated by a space character. -- name1, name2, name3 are names already parsed from the search criteria: name2 and name3 may be null. DECLARE v_name1 VARCHAR(50); v_name2 VARCHAR(50); v_name3 VARCHAR(50); v_match_count integer; BEGIN v_match_count := 0; v_name1 = split_part(search_key, ' ', 1); v_name2 = split_part(search_key, ' ', 2); -- May be null v_name3 = split_part(search_key, ' ', 3); -- May be null SELECT COUNT(name_id) INTO v_match_count FROM nicknames n1 WHERE (name = v_name1 AND n1.name_id IN (SELECT n2.name_id FROM nicknames n2 WHERE n2.name IN (name1, name2, name3))) OR (v_name2 IS NOT NULL AND name = v_name2 AND n1.name_id IN (SELECT n2.name_id FROM nicknames n2 WHERE n2.name IN (name1, name2, name3))) OR (v_name3 IS NOT NULL AND name = v_name3 AND n1.name_id IN (SELECT n2.name_id FROM nicknames n2 WHERE n2.name IN (name1, name2, name3))); RETURN v_match_count; END ; $$; """)
def upgrade():
    """Create the citation/cluster/opinion schema: one explicit id sequence per
    table, the tables and their lookup indexes, a GIN index for full-text
    search, the pg_trgm extension, and the trigger that keeps
    cluster.searchable_case_name up to date."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Each table's id default draws from an explicitly created sequence.
    op.execute(CreateSequence(Sequence("citation_seq")))
    op.create_table(
        "citation",
        sa.Column(
            "id",
            sa.BigInteger(),
            server_default=sa.text("nextval('citation_seq')"),
            nullable=False,
        ),
        sa.Column("citing_opinion_id", sa.BigInteger(), nullable=True),
        sa.Column("cited_opinion_id", sa.BigInteger(), nullable=True),
        sa.Column("depth", sa.BigInteger(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        "idx_40711_citation_cited_opinion_id",
        "citation",
        ["cited_opinion_id"],
        unique=False,
    )
    op.create_index(
        "idx_40711_citation_citing_opinion_id",
        "citation",
        ["citing_opinion_id"],
        unique=False,
    )
    op.execute(CreateSequence(Sequence("cluster_seq")))
    op.create_table(
        "cluster",
        sa.Column(
            "id",
            sa.BigInteger(),
            server_default=sa.text("nextval('cluster_seq')"),
            nullable=False,
        ),
        sa.Column("resource_id", sa.BigInteger(), nullable=True),
        sa.Column("case_name", sa.Text(), nullable=True),
        sa.Column("reporter", sa.Text(), nullable=True),
        sa.Column("citation_count", sa.BigInteger(), nullable=True),
        sa.Column("cluster_uri", sa.Text(), nullable=True),
        sa.Column("docket_uri", sa.Text(), nullable=True),
        sa.Column("year", sa.BigInteger(), nullable=True),
        sa.Column("time", sa.BigInteger(), nullable=True),
        sa.Column("searchable_case_name", postgresql.TSVECTOR(), nullable=True),
        sa.Column("court", sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.execute(CreateSequence(Sequence("clustercitation_seq")))
    op.create_table(
        "clustercitation",
        sa.Column(
            "id",
            sa.BigInteger(),
            server_default=sa.text("nextval('clustercitation_seq')"),
            nullable=False,
        ),
        sa.Column("citing_cluster_id", sa.BigInteger(), nullable=True),
        sa.Column("cited_cluster_id", sa.BigInteger(), nullable=True),
        sa.Column("depth", sa.BigInteger(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        "idx_40753_clustercitation_cited_cluster_id",
        "clustercitation",
        ["cited_cluster_id"],
        unique=False,
    )
    op.create_index(
        "idx_40753_clustercitation_citing_cluster_id",
        "clustercitation",
        ["citing_cluster_id"],
        unique=False,
    )
    op.execute(CreateSequence(Sequence("opinion_seq")))
    op.create_table(
        "opinion",
        sa.Column(
            "id",
            sa.BigInteger(),
            server_default=sa.text("nextval('opinion_seq')"),
            nullable=False,
        ),
        sa.Column("resource_id", sa.BigInteger(), nullable=True),
        sa.Column("opinion_uri", sa.Text(), nullable=True),
        sa.Column("cluster_uri", sa.Text(), nullable=True),
        sa.Column("cluster_id", sa.BigInteger(), nullable=True),
        sa.Column("html_text", sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("idx_40705_opinion_cluster_id", "opinion", ["cluster_id"], unique=False)
    op.execute(CreateSequence(Sequence("opinionparenthetical_id_seq")))
    op.create_table(
        "opinionparenthetical",
        sa.Column(
            "id",
            sa.Integer(),
            server_default=sa.text("nextval('opinionparenthetical_id_seq')"),
            nullable=False,
        ),
        sa.Column("citing_opinion_id", sa.Integer(), nullable=False),
        sa.Column("cited_opinion_id", sa.Integer(), nullable=False),
        sa.Column("text", sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        "opinionparenthetical_cited_opinion_id",
        "opinionparenthetical",
        ["cited_opinion_id"],
        unique=False,
    )
    op.create_index(
        "opinionparenthetical_citing_opinion_id",
        "opinionparenthetical",
        ["citing_opinion_id"],
        unique=False,
    )
    op.execute(CreateSequence(Sequence("similarity_seq")))
    op.create_table(
        "similarity",
        sa.Column(
            "id",
            sa.BigInteger(),
            server_default=sa.text("nextval('similarity_seq')"),
            nullable=False,
        ),
        sa.Column("opinion_a_id", sa.BigInteger(), nullable=True),
        sa.Column("opinion_b_id", sa.BigInteger(), nullable=True),
        sa.Column("similarity_index", sa.Float(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        "idx_40750_similarity_opinion_a_id",
        "similarity",
        ["opinion_a_id"],
        unique=False,
    )
    op.create_index(
        "idx_40750_similarity_opinion_b_id",
        "similarity",
        ["opinion_b_id"],
        unique=False,
    )
    # ### end Alembic commands ###
    # GIN index backing full-text search over cluster.searchable_case_name.
    op.create_index(
        "searchable_case_name_idx",
        "cluster",
        ["searchable_case_name"],
        unique=False,
        postgresql_using="gin",
    )
    public_pg_trgm = PGExtension(schema="public", signature="pg_trgm")
    op.create_entity(public_pg_trgm)
    # The trigger function must exist before the trigger that executes it.
    public_update_searchable_case_name_trigger = PGFunction(
        schema="public",
        signature="update_searchable_case_name_trigger()",
        definition="RETURNS trigger\n LANGUAGE plpgsql\n AS $$\n begin\n new.searchable_case_name := \n to_tsvector('pg_catalog.english', new.case_name || ' ' || coalesce(new.reporter, '') || ' ' || new.year);\n return new;\n end\n $$",
    )
    op.create_entity(public_update_searchable_case_name_trigger)
    public_cluster_update_searchable_case_name = PGTrigger(
        schema="public",
        signature="update_searchable_case_name",
        on_entity="public.cluster",
        is_constraint=False,
        definition="BEFORE INSERT OR UPDATE ON public.cluster\n FOR EACH ROW EXECUTE PROCEDURE public.update_searchable_case_name_trigger()",
    )
    op.create_entity(public_cluster_update_searchable_case_name)
def upgrade():
    """Add callback/storage columns to search_results, replace the
    searchkey_aircraft DB function with a simplified definition, and create
    the event-tracking tables seeded with their fixed type codes."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        'search_results',
        sa.Column('callback_url', sa.String(length=1000), nullable=True))
    op.add_column(
        'search_results',
        sa.Column('doc_storage_url', sa.String(length=1000), nullable=True))
    op.add_column(
        'search_results',
        sa.Column('account_name', sa.String(length=1000), nullable=True))
    # Replace the deployed DB function with the new, simplified definition.
    public_searchkey_aircraft = PGFunction(
        schema="public",
        signature="searchkey_aircraft(aircraft_number IN VARCHAR)",
        definition="RETURNS VARCHAR\n LANGUAGE plpgsql\n AS\n $$\n DECLARE\n v_search_key VARCHAR(25);\n BEGIN\n v_search_key := TRIM(right(REGEXP_REPLACE(aircraft_number,'[^\\w]+','','gi'),6));\n RETURN v_search_key;\n END\n ; \n $$"
    )
    op.replace_entity(public_searchkey_aircraft)
    # Added manually
    op.execute(
        CreateSequence(Sequence('event_tracking_id_seq', start=1, increment=1)))
    # Table handle kept so it can be seeded with bulk_insert below.
    event_tracking_types = op.create_table(
        'event_tracking_types',
        sa.Column('event_tracking_type', sa.String(length=20), nullable=False),
        sa.Column('event_tracking_desc', sa.String(length=100), nullable=False),
        sa.PrimaryKeyConstraint('event_tracking_type'))
    op.create_table(
        'event_tracking',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('key_id', sa.Integer(), nullable=False),
        sa.Column('event_ts', sa.DateTime(), nullable=False),
        sa.Column('event_tracking_type', sa.String(length=20), nullable=False),
        sa.Column('status', sa.Integer(), nullable=True),
        sa.Column('message', sa.String(length=2000), nullable=True),
        sa.Column('email_address', sa.String(length=250), nullable=True),
        sa.ForeignKeyConstraint(
            ['event_tracking_type'],
            ['event_tracking_types.event_tracking_type'],
        ),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_event_tracking_event_tracking_type'),
                    'event_tracking', ['event_tracking_type'],
                    unique=False)
    op.create_index(op.f('ix_event_tracking_event_ts'),
                    'event_tracking', ['event_ts'],
                    unique=False)
    op.create_index(op.f('ix_event_tracking_key_id'),
                    'event_tracking', ['key_id'],
                    unique=False)
    # Added manually: seed the fixed set of event tracking type codes.
    op.bulk_insert(
        event_tracking_types,
        [{
            'event_tracking_type': 'SEARCH_REPORT',
            'event_tracking_desc': 'Search Detail large report generation and storage.'
        }, {
            'event_tracking_type': 'API_NOTIFICATION',
            'event_tracking_desc': 'Notification by API callback of successful event outcome.'
        }, {
            'event_tracking_type': 'EMAIL',
            'event_tracking_desc': 'Email notification.'
        }, {
            'event_tracking_type': 'SURFACE_MAIL',
            'event_tracking_desc': 'Surface mail delivery of report to service provider.'
        }, {
            'event_tracking_type': 'EMAIL_REPORT',
            'event_tracking_desc': 'Email delivery of report.'
        }])
from alembic_utils.pg_function import PGFunction from alembic_utils.replaceable_entity import register_entities from alembic_utils.testbase import TEST_VERSIONS_ROOT, run_alembic_command TO_FLOAT_FROM_INT = PGFunction( schema="public", signature="to_float(x integer)", definition=""" returns float as $$ select x::float $$ language SQL; """, ) TO_FLOAT_FROM_TEXT = PGFunction( schema="public", signature="to_float(x text)", definition=""" returns float as $$ select x::float $$ language SQL; """, ) def test_create_revision(engine) -> None: register_entities([TO_FLOAT_FROM_INT, TO_FLOAT_FROM_TEXT], entity_types=[PGFunction]) run_alembic_command( engine=engine,
from alembic_utils.pg_function import PGFunction from alembic_utils.replaceable_entity import register_entities from alembic_utils.testbase import TEST_VERSIONS_ROOT, run_alembic_command TO_UPPER = PGFunction( schema="public", signature="toUpper(some_text text default 'my text!')", definition=""" returns text as $$ begin return upper(some_text) || 'abc'; end; $$ language PLPGSQL; """, ) def test_create_revision(engine) -> None: register_entities([TO_UPPER]) run_alembic_command( engine=engine, command="revision", command_kwargs={ "autogenerate": True, "rev_id": "1", "message": "create" }, ) migration_create_path = TEST_VERSIONS_ROOT / "1_create.py" with migration_create_path.open() as migration_file:
# Search key for business names: for names whose leading token is mostly
# numeric, keeps only the digits of the first token; otherwise strips a long
# list of corporate noise words (INVESTMENTS, HOLDINGS, LTD, INC, ...) and
# normalizes punctuation to spaces. Falls back to the punctuation-stripped
# original name when everything was removed.
# The definition is a raw string: the SQL regexes contain '\w', '\s', and
# '\(' which are invalid escape sequences in a non-raw Python literal
# (SyntaxWarning today, slated to become an error). The r-prefix leaves the
# runtime bytes unchanged since none of these were recognized escapes.
searchkey_business_name = PGFunction(
    schema="public",
    signature="searchkey_business_name(actual_name IN VARCHAR)",
    definition=r"""
    RETURNS VARCHAR
    COST 100
    VOLATILE PARALLEL UNSAFE
    LANGUAGE plpgsql
    AS
    $$
    DECLARE
        v_search_key VARCHAR(40);
    BEGIN
        if LENGTH(SPLIT_PART(REGEXP_REPLACE(actual_name,'[A-Z]+','','g'),' ',1))>=5 then
            -- Mostly-numeric leading token: keep only its digits.
            v_search_key := REGEXP_REPLACE(actual_name,'^0000|^000|^00|^0','','g');
            v_search_key := REGEXP_REPLACE(SPLIT_PART(v_search_key,' ',1),'[A-Za-z]+','','g');
            v_search_key := REGEXP_REPLACE(v_search_key,'[^\w]+|[A-Za-z]+','','g');
        else
            -- Strip corporate suffixes/noise words, normalize BC references,
            -- expand '&', and convert punctuation to spaces.
            v_search_key := REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(
                REGEXP_REPLACE(actual_name,'INVESTMENTS|INVESTMENT','','gi'),
                'AUTO | AERO|DEVELOPMENTS|DEVELOPMENT|ENTERPRISES|ENTERPRISE|EQUIPMENT|GROUP|HOLDINGS|HOLDING|HOMES|INDUSTRIES|MANAGEMENT|MOTORS|PRODUCTIONS|PRODUCTS|PROPERTIES|PROPERTY|RENTALS|SALES|SERVICES|SERVICE|SOLUTIONS|SYSTEMS|TRANSPORT|TRUCKING|VENTURES','','gi'),
                'CONSULTING','','gi'),
                'LOGISTICS','','gi'),
                'MECHANICAL','','gi'),
                'AUTOMOBILE|AUTOBODY','','gi'),
                'AVENUE|STREET','','gi'),
                ' EAST | WEST | SOUTH | NORTH ','','gi'),
                'CONSTRUCTION|CONTRACTING|CONTRACTORS','','gi'),
                'LIMITED PARTNERSHIP| LIMITED| PARTNERSHIP','','gi'),
                'SOCIETY|ASSOCIATION|TRUST|SOCIETE','','gi'),
                'BRITISH COLUMBIA|BRITISHCOLUMBIA','BC','gi'),
                'INCORPORATED|INCORPOREE|INCORPORATION|INCORP|INC.$|INC$','','gi'),
                'COMPANY| CORPORATION|CORPORATION$| CORPS| CORP| CO.$| CO.,$| CO$| CO.$','','gi'),
                'LIMITEE$|LTEE$| LTD| LTD.|LTD.$|LTD$|LTD,.$','','gi'),
                ' B.C.| B.C| BC.',' BC ','g'),
                ' DEV | DEV. ','','gi'),
                ' ULC$','','gi'),
                '^THE ','','gi'),
                '\([^()]*\)','','gi'),
                '&','AND','gi'),
                '[^\w]+',' ','gi');
        end if;
        -- Fallback when all content was stripped away.
        if v_search_key = ' ' then
            v_search_key := regexp_replace(actual_name,'[^\w]+','','gi');
        end if;
        v_search_key := trim(regexp_replace(v_search_key,'\s+',' ','gi'));
        RETURN v_search_key;
    END
    ;
    $$;
    """)
) ReflectedExcludedView = PGView( schema="public", signature="exclude_name_reflected_view", definition="select 1 as one", ) FuncDef = """ returns text as $$ begin return upper(some_text) || 'abc'; end; $$ language PLPGSQL; """ IncludedFunc = PGFunction( schema="public", signature="toUpper(some_text text default 'my text!')", definition=FuncDef) ObjExcludedFunc = PGFunction( schema="public", signature="exclude_obj_toUpper(some_text text default 'my text!')", definition=FuncDef, ) ReflectedIncludedFunc = PGFunction( schema="public", signature="reflected_toUpper(some_text text default 'my text!')", definition=FuncDef, ) ReflectedExcludedFunc = PGFunction(