def rest_setting(setting: Dict):
    """Takes a dictionary representation of a setting and maps it into the
    SETTING table of the PostgreSQL database."""
    insert_sql = """
        INSERT INTO SETTING
        (CASE_NUMBER, CASE_LINK, SETTING_TYPE, SETTING_STYLE, JUDICIAL_OFFICER,
        SETTING_DATE, SETTING_TIME, HEARING_TYPE)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
        ON CONFLICT(CASE_NUMBER, SETTING_TYPE, HEARING_TYPE, SETTING_DATE)
        DO NOTHING
    """
    # Column order of the INSERT above, expressed as dict keys of `setting`.
    field_order = (
        "case_number",
        "case_link",
        "setting_type",
        "setting_style",
        "judicial_officer",
        "setting_date",
        "setting_time",
        "hearing_type",
    )
    connection = get_database_connection(local_dev=local_dev)
    cursor = connection.cursor()
    cursor.execute(insert_sql, tuple(setting[key] for key in field_order))
    connection.commit()
    cursor.close()
    connection.close()
def get_case(case_id: str) -> Dict:
    """Fetches a single row from the V_CASE view by case number.

    :param case_id: the case number to look up
    :return: the matching row converted to a dict
    :raises TypeError: if no row matches `case_id` (fetchone returns None)
    """
    conn = get_database_connection(local_dev=local_dev)
    curs = conn.cursor()
    # BUG FIX: psycopg2 uses the %s paramstyle; the old sqlite3-style "?"
    # placeholder is a syntax error against PostgreSQL.
    curs.execute("SELECT * FROM V_CASE WHERE CASE_NUMBER = %s", (case_id,))
    case = curs.fetchone()
    curs.close()
    conn.close()  # previously leaked the connection
    # NOTE(review): dict(case) assumes the cursor yields mapping-like rows
    # (e.g. RealDictCursor) -- confirm how get_database_connection configures it.
    return dict(case)
def rest_case(case):
    """Takes a dictionary representation of a case and maps it in to a PostgreSQL DB.

    Upserts one row into CASE_DETAIL and one into DISPOSITION, keyed on
    CASE_NUMBER.

    :param case: dict with case_number, status, register_url, precinct_number,
        style, plaintiff, defendants, plaintiff_zip, defendant_zip, type, and
        the disposition_* fields
    """
    conn = get_database_connection(local_dev=local_dev)
    curs = conn.cursor()
    curs.execute(
        """
        INSERT INTO CASE_DETAIL
        (CASE_NUMBER, STATUS, REGISTER_URL, PRECINCT, STYLE, PLAINTIFF,
        DEFENDANTS, PLAINTIFF_ZIP, DEFENDANT_ZIP, CASE_TYPE)
        VALUES (%(case_num)s, %(status)s, %(reg_url)s, %(prec_num)s, %(style)s,
        %(plaint)s, %(defend)s, %(plaint_zip)s, %(defend_zip)s, %(type)s)
        ON CONFLICT(CASE_NUMBER)
        DO UPDATE SET
        (STATUS, REGISTER_URL, PRECINCT, STYLE, PLAINTIFF, DEFENDANTS,
        PLAINTIFF_ZIP, DEFENDANT_ZIP, CASE_TYPE) =
        (%(status)s, %(reg_url)s, %(prec_num)s, %(style)s, %(plaint)s,
        %(defend)s, %(plaint_zip)s, %(defend_zip)s, %(type)s)
        """,
        {
            'case_num': case["case_number"],
            'status': case["status"],
            'reg_url': case["register_url"],
            'prec_num': case["precinct_number"],
            'style': case["style"],
            'plaint': case["plaintiff"],
            'defend': case["defendants"],
            'plaint_zip': case["plaintiff_zip"],
            'defend_zip': case["defendant_zip"],
            'type': case["type"],
        },
    )
    curs.execute(
        """
        INSERT INTO DISPOSITION
        (CASE_NUMBER, TYPE, DATE, AMOUNT, AWARDED_TO, AWARDED_AGAINST)
        VALUES (%(case_num)s, %(disp_type)s, %(disp_date)s, %(disp_amt)s,
        %(disp_to)s, %(disp_against)s)
        ON CONFLICT(CASE_NUMBER)
        DO UPDATE SET
        (TYPE, DATE, AMOUNT, AWARDED_TO, AWARDED_AGAINST) =
        (%(disp_type)s, %(disp_date)s, %(disp_amt)s, %(disp_to)s,
        %(disp_against)s)
        """,
        {
            'case_num': case["case_number"],
            'disp_type': case["disposition_type"],
            'disp_date': case["disposition_date"],
            # AMOUNT stored as text; stringify to match the original behavior.
            'disp_amt': str(case["disposition_amount"]),
            'disp_to': case["disposition_awarded_to"],
            'disp_against': case["disposition_awarded_against"],
        },
    )
    # BUG FIX: the original never committed or closed -- the two upserts were
    # silently rolled back when the connection was garbage-collected.
    conn.commit()
    curs.close()
    conn.close()
def get_old_active_case_nums() -> List[str]:
    """Returns list of case numbers in CASE_DETAIL table that are still active
    (as determined by the STATUS column)."""
    # Any status outside this closed-status list counts as "active".
    query = """SELECT CASE_NUMBER FROM CASE_DETAIL WHERE LOWER(STATUS) NOT IN ('final disposition', 'transferred', 'bankruptcy', 'judgment released', 'judgment satisfied', 'appealed', 'final status', 'dismissed')"""
    connection = get_database_connection(local_dev=local_dev)
    cursor = connection.cursor()
    cursor.execute(query)
    rows = cursor.fetchall()
    cursor.close()
    connection.close()
    return [row[0] for row in rows]
def drop_rows_from_table(table_name: str, case_ids: list):
    """Drops all rows with case number in case_ids from table `table_name`
    - works for CASE_DETAIL, DISPOSITION, and EVENT tables.

    :param table_name: one of "CASE_DETAIL", "DISPOSITION", "EVENT"
    :param case_ids: case numbers whose rows should be deleted
    :raises ValueError: if table_name is not a supported table
    """
    # BUG FIX: identifiers cannot be bound as query parameters -- psycopg2
    # quotes %s values as string literals, so the original
    # "DELETE FROM %s ..." produced invalid SQL. Whitelist the table name
    # and interpolate it directly (also prevents SQL injection).
    allowed_tables = ("CASE_DETAIL", "DISPOSITION", "EVENT")
    if table_name not in allowed_tables:
        raise ValueError(f"Unsupported table: {table_name}")
    if not case_ids:
        return  # nothing to delete
    conn = get_database_connection(local_dev=local_dev)
    curs = conn.cursor()
    # psycopg2 adapts a Python tuple to a parenthesized SQL list, so the
    # IN clause is safely parameterized -- no hand-built "(...)" string
    # (the old single-element special case is no longer needed).
    curs.execute(
        f"DELETE FROM {table_name} WHERE CASE_NUMBER IN %s",
        (tuple(case_ids),),
    )
    conn.commit()
    curs.close()
    conn.close()
def dump_to_sheets(sheet, worksheet, tables, filter_evictions=False):
    """Reads the given DB tables, outer-merges them on case_number, and writes
    the result to a Google Sheets worksheet. No-op in local development.

    :param sheet: spreadsheet name to open
    :param worksheet: worksheet name within the spreadsheet
    :param tables: iterable of table/view names to read and merge
    :param filter_evictions: if True, filter rows to case_type 'Eviction'
    """
    if os.getenv("LOCAL_DEV") != "true":
        sheet = gsheet.open_sheet(gsheet.init_sheets(), sheet, worksheet)
        # Reuse one connection for all tables (the original opened a fresh,
        # never-closed connection per table) and close it when done.
        conn = connect_to_database.get_database_connection(local_dev)
        try:
            dfs = []
            for table in tables:
                df = pd.read_sql_query("select * from " + table, conn)
                # Group cases with multiple events into the same case number --
                # disabled: it leads to columns with ", " junk.
                # if table == "events":
                #     df = df.groupby("case_detail_id").fillna('').agg(', '.join).reset_index()
                dfs.append(df)
        finally:
            conn.close()
        # BUG FIX: the merge lambda referenced undefined names (vright, von=,
        # vhow=) and raised NameError whenever 2+ tables were merged.
        df = reduce(
            lambda left, right: pd.merge(
                left, right, on='case_number', how='outer'),
            dfs)
        if filter_evictions:
            gsheet.filter_df(df, 'case_type', 'Eviction')
        gsheet.write_data(sheet, df)
    else:
        logger.info(
            "Not dumping to google sheets because LOCAL_DEV environment variable is 'true'."
        )
def update_first_court_apperance_column():
    """Updates the first_court_appearance column of the CASE_DETAIL table in
    PostgreSQL using the latest database data."""
    # For each case, take the earliest event date among hearing-type events.
    update_query = """
        UPDATE case_detail
        SET first_court_appearance =
        (SELECT MIN (TO_DATE("date", 'MM/DD/YYYY'))
        FROM event
        WHERE (event.case_number = case_detail.case_number)
        AND (LOWER(event.type) IN
        ('appearance', 'default hearing', 'eviction hearing',
        'exparte hearing', 'hearing', 'indigency hearing',
        'motion for dj hearing', 'motion hearing', 'pre-trial hearing',
        'trial before court', 'writ hearing'))
        )
    """
    connection = get_database_connection(local_dev=local_dev)
    cursor = connection.cursor()
    cursor.execute(update_query)
    connection.commit()
    cursor.close()
    connection.close()
def dump_to_sheets(sheet, worksheet, sql="SELECT * FROM table"):
    """Function to dump sql view or table to sheet based on q
    defaults to selecting all.

    :param sheet: spreadsheet name to open
    :param worksheet: worksheet name within the spreadsheet
    :param sql: query whose result set is written to the sheet
    """
    sheet = open_sheet(init_sheets(), sheet, worksheet)
    conn = connect_to_database.get_database_connection(local_dev=local_dev)
    try:
        df = pd.read_sql_query(sql, conn)
    finally:
        # BUG FIX: the original never closed the connection (leak).
        conn.close()
    write_data(sheet, df)
# conn.row_factory = sqlite3.Row curs = conn.cursor() curs.execute("SELECT * FROM V_CASE WHERE CASE_NUMBER = ?", (case_id,)) case = curs.fetchone() curs.close() return dict(case) def rest_case(case: Dict): """ Takes a dictionary representation of a case and maps it into the CASE_DETAIL, DISPOSITION, and EVENT table of the PostgreSQL database """ conn = get_database_connection(local_dev=local_dev) curs = conn.cursor() curs.execute( """ INSERT INTO CASE_DETAIL (CASE_NUMBER, STATUS, REGISTER_URL, PRECINCT, STYLE, PLAINTIFF, DEFENDANTS, PLAINTIFF_ZIP, DEFENDANT_ZIP, CASE_TYPE, DATE_FILED, ACTIVE_OR_INACTIVE, JUDGMENT_AFTER_MORATORIUM) VALUES (%(case_num)s, %(status)s, %(reg_url)s, %(prec_num)s, %(style)s, %(plaint)s, %(defend)s, %(plaint_zip)s, %(defend_zip)s, %(type)s, %(date_filed)s, %(active_or_inactive)s, %(after_moraorium)s) ON CONFLICT(CASE_NUMBER) DO UPDATE SET (STATUS, REGISTER_URL, PRECINCT, STYLE, PLAINTIFF, DEFENDANTS, PLAINTIFF_ZIP, DEFENDANT_ZIP, CASE_TYPE, DATE_FILED, ACTIVE_OR_INACTIVE, JUDGMENT_AFTER_MORATORIUM) = (%(status)s, %(reg_url)s, %(prec_num)s, %(style)s, %(plaint)s, %(defend)s, %(plaint_zip)s, %(defend_zip)s, %(type)s, %(date_filed)s, %(active_or_inactive)s, %(after_moraorium)s) """, { "case_num": case["case_number"], "status": case["status"], "reg_url": case["register_url"],
# conn.row_factory = sqlite3.Row curs = conn.cursor() curs.execute("SELECT * FROM V_CASE WHERE CASE_NUMBER = ?", (case_id, )) case = curs.fetchone() curs.close() return dict(case) def rest_case(case: Dict): """ Takes a dictionary representation of a case and maps it into the CASE_DETAIL, DISPOSITION, and EVENT table of the PostgreSQL database """ conn = get_database_connection(local_dev=config.local_dev) curs = conn.cursor() curs.execute( """ INSERT INTO CASE_DETAIL (CASE_NUMBER, STATUS, REGISTER_URL, PRECINCT, STYLE, PLAINTIFF, DEFENDANTS, PLAINTIFF_ZIP, DEFENDANT_ZIP, CASE_TYPE, DATE_FILED, ACTIVE_OR_INACTIVE, JUDGMENT_AFTER_MORATORIUM) VALUES (%(case_num)s, %(status)s, %(reg_url)s, %(prec_num)s, %(style)s, %(plaint)s, %(defend)s, %(plaint_zip)s, %(defend_zip)s, %(type)s, %(date_filed)s, %(active_or_inactive)s, %(after_moraorium)s) ON CONFLICT(CASE_NUMBER) DO UPDATE SET (STATUS, REGISTER_URL, PRECINCT, STYLE, PLAINTIFF, DEFENDANTS, PLAINTIFF_ZIP, DEFENDANT_ZIP, CASE_TYPE, DATE_FILED, ACTIVE_OR_INACTIVE, JUDGMENT_AFTER_MORATORIUM) = (%(status)s, %(reg_url)s, %(prec_num)s, %(style)s, %(plaint)s, %(defend)s, %(plaint_zip)s, %(defend_zip)s, %(type)s, %(date_filed)s, %(active_or_inactive)s, %(after_moraorium)s) """, { "case_num": case["case_number"], "status": case["status"], "reg_url": case["register_url"],
import pandas as pd from datetime import datetime from dotenv import load_dotenv from arcgis import join_features from arcgis.gis import GIS from arcgis.features import FeatureLayerCollection from connect_to_database import get_database_connection from statuses import statuses_map from emailing import log_and_email logger = logging.getLogger() logging.basicConfig(stream=sys.stdout) logger.setLevel(logging.INFO) load_dotenv() engine = get_database_connection(local_dev=False) ARCGIS_USERNAME, ARCGIS_PASSWORD = os.getenv("ARCGIS_USERNAME"), os.getenv( "ARCGIS_PASSWORD") def overwrite_csv(username: str, password: str, new_df: DataFrame, old_csv_name: str): """ Overwrites the existing table/feature layer named `old_csv_name` using `new_df` Only works if `new_df` has the same columns as the old feature/table (Create an existing table/feature layer by manually uploading a csv to arcGIS and selecting the "Publish this file as a hosted layer" option) """ gis = GIS(url='https://www.arcgis.com', username=username, password=password)