def main():
    """Create and populate the database on first run, then query and print it.

    Side effects: may create the SQLite file named by config.database_name
    and build its tables from config.TABLES.
    """
    if not path.exists(config.database_name):
        # First run: create the file and build the schema.
        conn = create_connection(config.database_name)
        build_db(conn, config.TABLES)
        conn.close()  # fix: the build connection was previously never closed
    conn = create_connection(config.database_name)
    try:
        df = query_db(conn)
        print(df)
    finally:
        conn.close()  # guaranteed even if query_db raises
def query_sent_comp(db_name):
    """Part B: query sentence components for the Most Serious Offense.

    This variable is missing in much of dataset A and is needed as our
    outcome variable (dataset B), so it is pulled from all sentence
    components limited to prison sentences.

    Args:
        db_name: path to the SQLite database file.

    Returns:
        DataFrame of sentence-component rows from OFNT3CE1.
    """
    start = datetime.datetime.now()
    query_sentence_component = '''
    SELECT OFFENDER_NC_DOC_ID_NUMBER as ID,
           COMMITMENT_PREFIX,
           SENTENCE_COMPONENT_NUMBER,
           PRIMARY_OFFENSE_CODE,
           PRIMARY_FELONYMISDEMEANOR_CD,
           SENTENCING_PENALTY_CLASS_CODE,
           PRIOR_RECORD_LEVEL_CODE,
           MINIMUM_SENTENCE_LENGTH,
           MAXIMUM_SENTENCE_LENGTH,
           SENTENCE_TYPE_CODE,
           COUNTY_OF_CONVICTION_CODE
    FROM OFNT3CE1
    WHERE SENTENCE_TYPE_CODE LIKE '%PRISONS%';
    '''
    conn = create_connection(db_name)
    try:
        sent_comp_small = qd.query_db(conn, query_sentence_component)
    finally:
        conn.close()  # fix: connection was previously never closed
    stop = datetime.datetime.now()
    print("Time Elapsed:", stop - start)
    return sent_comp_small
def update_db_data(data):
    """Insert the latest GDAX quotes into the `prices` table.

    Args:
        data: mapping of trading-pair name (e.g. "BTC-USD") to a dict with
            at least a 'price' entry.  Pairs not listed in ALL_COINS are
            skipped.

    Side effects: commits one row per accepted coin, all sharing a single
    transaction timestamp.
    """
    conn = create_connection()
    try:
        c = conn.cursor()
        # One timestamp for the whole batch so all rows share it.
        trans_time = datetime.now().isoformat()
        # Hoisted out of the loop: the statement text never changes.
        sql = ''' INSERT INTO prices(exchange, coin, price, trading_pair, trans_time)
              VALUES (?, ?, ?, ?, ?) '''
        for coin in data:  # iterating the dict yields its keys directly
            if coin not in ALL_COINS:
                continue
            values = (
                "GDAX",
                coin.split('-')[0],  # base currency, e.g. "BTC" from "BTC-USD"
                data[coin]['price'],
                coin,
                trans_time
            )
            c.execute(sql, values)
            print("SQL run for: ", coin)
        conn.commit()
    finally:
        conn.close()  # fix: connection leaked if any execute raised
def get_additional_features(db_name, dataset_flag):
    """Part E: attach per-sentence disciplinary-infraction counts.

    Writes `dataset_flag` into the DB as table `dataset_AB`, counts
    infractions dated within each sentence window, splits counts evenly
    across concurrent sentences, and merges the result back on
    (ID, COMMITMENT_PREFIX); sentences with no infractions get 0.

    Args:
        db_name: path to the SQLite database file.
        dataset_flag: DataFrame with at least ID, COMMITMENT_PREFIX,
            EARLIEST_SENTENCE_EFFECTIVE_DT and END_DATE columns.

    Returns:
        dataset_flag with an INFRACTION_PER_SENT column added.
    """
    conn = create_connection(db_name)
    dataset_flag.to_sql('dataset_AB', conn, if_exists='replace', index=False)
    query = '''
    SELECT INMATE_DOC_NUMBER as ID,
           DISCIPLINARY_INFRACTION_DATE,
           COMMITMENT_PREFIX,
           EARLIEST_SENTENCE_EFFECTIVE_DT,
           END_DATE,
           COUNT(DISCIPLINARY_INFRACTION_DATE) as INFRACTION_PER_SENT
    FROM INMT9CF1 A INNER JOIN dataset_AB B
    WHERE A.INMATE_DOC_NUMBER = B.ID
      AND A.DISCIPLINARY_INFRACTION_DATE >= B.EARLIEST_SENTENCE_EFFECTIVE_DT
      AND A.DISCIPLINARY_INFRACTION_DATE <= B.END_DATE
    GROUP BY INMATE_DOC_NUMBER, COMMITMENT_PREFIX ;
    '''
    disc_infraction = qd.query_db(conn, query)
    conn.close()  # fix: `conn.close` without parentheses never closed it

    # Divide infractions by # of sentences if there are dups on
    # ID / DISCIPLINARY_INFRACTION_DATE — might indicate concurrent sentences.
    count_dups = disc_infraction.groupby([
        'ID', 'DISCIPLINARY_INFRACTION_DATE'
    ])["ID"].count().reset_index(name="count")
    disc_infraction = disc_infraction.merge(count_dups, how='left')
    disc_infraction['INFRACTION_PER_SENT'] = round(
        disc_infraction['INFRACTION_PER_SENT'] / disc_infraction['count'])
    disc_infraction = disc_infraction.loc[:, [
        'ID', 'COMMITMENT_PREFIX', 'INFRACTION_PER_SENT'
    ]]
    print("Disc Infractions", disc_infraction.shape)

    # Merge on disciplinary infractions, replace missing with 0.
    dataset_flag = dataset_flag.merge(disc_infraction, how='left',
                                      on=['ID', 'COMMITMENT_PREFIX'])
    dataset_flag.loc[dataset_flag['INFRACTION_PER_SENT'].isnull(),
                     'INFRACTION_PER_SENT'] = 0
    return dataset_flag
def main():
    """Smoke-test helper: rebuild a tiny `test` table and insert two rows.

    Side effects: drops and recreates table `test` in config.database_name.
    """
    conn = create_connection(config.database_name)
    try:
        table_name = 'test'
        test_df = pd.DataFrame({'name': ['John', 'Karen'], 'age': [41, 32]})
        columns = ', '.join(test_df.columns)
        c = conn.cursor()
        # Recreate the table from scratch on every run.
        c.execute("DROP TABLE IF EXISTS test;")
        c.execute("CREATE TABLE IF NOT EXISTS test (name,age);")
        records = extract_data(test_df)
        insert_records(conn, table_name, columns, records)
        conn.commit()
    finally:
        conn.close()  # fix: connection leaked if any step above raised
def add(location_id, user_id, location_name, location_address):
    """Insert one saved location for a user into user_locations.

    Args:
        location_id: primary key for the location row.
        user_id: id of the owning user.
        location_name: display name of the location.
        location_address: street address string.

    Returns:
        None always; SQLite errors are printed rather than raised.
    """
    print("Add")
    database = r"C:\Users\Skull\PycharmProjects\Routing\carpool_db.db"
    conn = create_connection(database)
    try:
        query = """INSERT INTO user_locations (
                      location_id,
                      user_id,
                      location_name,
                      location_address)
                  VALUES (?,?,?,?);"""
        data_tuple = (location_id, user_id, location_name, location_address)
        # Connection.execute creates its own cursor; the original's unused
        # `cur = conn.cursor()` has been removed.
        conn.execute(query, data_tuple)
        conn.commit()
    except sqlite3.Error as e:
        print("SQLite Error: %s" % str(e))
    finally:
        conn.close()  # fix: previously leaked whenever an error occurred
    return None
# NOTE(review): this chunk begins mid-function — the statements through
# `return None` are the tail of an enclosing `def` outside this view; it
# appears to build (hostname, location) pairs from a parsed switches YAML
# mapping — confirm against the full function.
# The `if __name__ == '__main__':` part is the script entry point: it reads
# all *_dhcp_snooping.txt files and switches.yml, and if dhcp_snooping.db
# already exists bulk-inserts rows into the `switches` and `dhcp` tables;
# otherwise it prints (in Russian) that the database must be created first.
hosts = tuple([host for host in hostnames['switches'].keys()]) locations = tuple( [host for host in hostnames['switches'].values()]) res = list(zip(hosts, locations)) return res else: return None if __name__ == '__main__': res = [] list_of_datas = sorted(glob.glob('*_dhcp_snooping.txt')) switches = data_reader_yaml('switches.yml') dhcp = data_reader_dhcp(list_of_datas) db_exists = os.path.exists('dhcp_snooping.db') con = create_connection('dhcp_snooping.db') if db_exists: query_for_dhcp = 'insert into dhcp (mac, ip, vlan, interface, switch) values (?, ?, ?, ?, ?)' query_for_switches = 'INSERT into switches values (?, ?)' print('Добавляю данные в таблицу switches...') write_data_to_db(con, query_for_switches, switches) print('Добавляю данные в таблицу dhcp...') write_data_to_db(con, query_for_dhcp, dhcp) else: print( 'База данных не существует. Перед добавлением данных, ее надо создать' )
import sqlite3
from sqlite3 import Error

import pandas as pd

from create_db import create_connection, create_table, clean_column_names
from populate_db import extract_data, insert_records
import config

# Load the OFNT3CE1 CSV from the data folder into a same-named DB table.
table_name = "OFNT3CE1"
conn = create_connection(config.database_name)
try:
    print("\tReading in table data as pandas DataFrame")
    # dtype=str keeps leading zeros in code/ID columns intact.
    table_data = pd.read_csv(config.data_folder + table_name + ".csv",
                             dtype=str)
    columns = clean_column_names(', '.join(table_data.columns))
    print("\tCreating table in database...")
    print(columns)
    create_table(conn, table_name, columns)
    print("\tExtract records")
    records = extract_data(table_data)
    print("\tInserting records into table...")
    insert_records(conn, table_name, columns, records)
    conn.commit()  # ensure inserts persist even if insert_records doesn't commit
finally:
    conn.close()  # fix: connection was previously never closed
def main():
    """Open a connection to the local SQLite database file."""
    conn = create_connection('./pythonsqlite.db')
def query_court_computation(db_name):
    """Part A: build sentence-level data (dataset A).

    Combines court commitments with sentence computation for every
    infraction resulting in incarceration, then merges on inmate- and
    offender-profile attributes.

    Args:
        db_name: path to the SQLite database file.

    Returns:
        DataFrame keyed by (ID, COMMITMENT_PREFIX) with end dates and
        profile columns merged on by ID.
    """
    start = datetime.datetime.now()

    # Court commitments that opened a new period of incarceration.
    query_court_commitment = '''
    SELECT A.OFFENDER_NC_DOC_ID_NUMBER as ID,
           A.COMMITMENT_PREFIX,
           A.EARLIEST_SENTENCE_EFFECTIVE_DT,
           A.MOST_SERIOUS_OFFENSE_CODE
    FROM OFNT3BB1 A
    WHERE NEW_PERIOD_OF_INCARCERATION_FL = "Y";
    '''
    conn = create_connection(db_name)
    court_small = qd.query_db(conn, query_court_commitment)

    # Latest actual / projected end date per (inmate, commitment).
    query_sentence_comp = '''
    SELECT INMATE_DOC_NUMBER as ID,
           INMATE_COMMITMENT_PREFIX as COMMITMENT_PREFIX,
           INMATE_COMPUTATION_STATUS_FLAG,
           max(ACTUAL_SENTENCE_END_DATE) as END_DATE,
           max(PROJECTED_RELEASE_DATE_PRD) as PROJ_END_DATE
    FROM INMT4BB1
    GROUP BY INMATE_DOC_NUMBER, INMATE_COMMITMENT_PREFIX;
    '''
    sentence_compute_small = qd.query_db(conn, query_sentence_comp)

    # NOTE: the original assigned a wider inmate-profile query here and
    # immediately overwrote it with this narrower one; the dead first
    # assignment has been removed.
    query_inmt_profile = '''
    SELECT INMATE_DOC_NUMBER as ID,
           INMATE_RECORD_STATUS_CODE,
           INMATE_ADMIN_STATUS_CODE,
           DATE_OF_LAST_INMATE_MOVEMENT,
           TYPE_OF_LAST_INMATE_MOVEMENT,
           CURRENT_COMMITMENT_PREFIX,
           INMATE_CONTROL_STATUS_CODE as CONTROL_STATUS
    FROM INMT4AA1;
    '''
    inmt_profile = qd.query_db(conn, query_inmt_profile)

    query_offender_profile = '''
    SELECT OFFENDER_NC_DOC_ID_NUMBER as ID,
           OFFENDER_GENDER_CODE as GENDER,
           OFFENDER_RACE_CODE as RACE,
           OFFENDER_BIRTH_DATE as BIRTH_DATE,
           STATE_WHERE_OFFENDER_BORN as STATE_BORN,
           OFFENDER_ETHNIC_CODE as ETHNICITY,
           OFFENDER_CITIZENSHIP_CODE as CITIZENSHIP
    FROM OFNT3AA1;
    '''
    offender_profile = qd.query_db(conn, query_offender_profile)
    conn.close()  # fix: `conn.close` without parentheses never closed it

    # Outer merge keeps commitments that appear in only one of the two
    # sources; profile tables are keyed by ID alone.
    data = court_small.merge(sentence_compute_small,
                             on=['ID', 'COMMITMENT_PREFIX'], how='outer')
    data = data.merge(inmt_profile, on=['ID'], how='left')
    data = data.merge(offender_profile, on=['ID'], how='left')

    stop = datetime.datetime.now()
    print("Time Elapsed:", stop - start)
    return data
import json

from create_db import create_connection


def insert_car(conn, car_dict):
    """Insert one car record into the `cars` table.

    Args:
        conn: open sqlite3 connection.
        car_dict: dict with vin, mileage, city, state, make, model,
            year and price keys.

    Returns:
        The rowid of the newly inserted row.
    """
    car_tuple = (
        car_dict["vin"],
        car_dict["mileage"],
        car_dict["city"],
        car_dict["state"],
        car_dict["make"],
        car_dict["model"],
        car_dict["year"],
        car_dict["price"],
    )
    insert_statement = '''
        insert into cars(vin, mileage, city, state, make, model, year, price)
        values(?,?,?,?,?,?,?,?)'''
    cur = conn.cursor()
    cur.execute(insert_statement, car_tuple)
    return cur.lastrowid


with open('data.json', 'r') as f:
    # Raw string with the same runtime value as the original
    # "C:\\sqlite\db\..." — which mixed escaped and unescaped backslashes
    # (\d is an invalid escape sequence, a warning on modern Python).
    conn = create_connection(r"C:\sqlite\db\pythonsqlite.db")
    with conn:  # commits on success, rolls back on error
        for json_line in f:
            insert_car(conn, json.loads(json_line))
    conn.close()  # fix: connection was previously never closed