def main():
    """Load location records from ``json_updated.json`` into the cloud DB.

    Ensures the target table exists, then converts each top-level JSON
    entry into database rows via ``convert_json_to_db``.
    """
    db = modals.CloudDB()
    create_table(db.engine)

    # Parse the JSON file directly from the open handle.
    with open("json_updated.json") as f:
        loc_vals = json.load(f)

    # One conversion call per top-level key/value pair.
    for key, value in loc_vals.items():
        convert_json_to_db(key, value, db.get_session())
def main():
    """Bulk-load Baltimore crime CSV data into the ``feed_master`` table.

    Reads up to ~50000 rows (approx one year of data) from the
    ``baltimorecity3335`` CSV export, builds ``modals.feed_master``
    entries, and saves them in a single bulk operation — much faster
    than committing row by row.

    Rows whose longitude/latitude cannot be parsed as floats are
    reported and skipped.
    """
    db = modals.CloudDB()
    list_of_data = []

    # newline='' is required by the csv module so quoted fields with
    # embedded newlines are parsed correctly.
    with open("baltimorecity3335", newline="") as f:
        baltimore_crime_data = csv.reader(f)
        next(baltimore_crime_data, None)  # skip the header row

        # The header row counts toward the cap, matching the original
        # "rows > 50000" semantics (49999 data rows processed).
        rows = 1
        for row in baltimore_crime_data:
            rows += 1
            if rows > 50000:  # approx 1 year of data
                break

            dt = row[0]
            lon = row[10]
            lat = row[11]
            crime_type = row[4]

            data_to_enter = dict()
            # Slicing is safe for short strings; the description column
            # holds at most 39 characters.
            data_to_enter["description"] = crime_type[:39]
            data_to_enter["date"] = datetime.datetime.strptime(dt, "%m/%d/%Y")
            try:
                data_to_enter["longitude"] = float(lon)
                data_to_enter["latitude"] = float(lat)
            except ValueError as e:
                # Best-effort load: report the bad row and move on.
                print(e, row)
                continue

            list_of_data.append(modals.feed_master(data_to_enter))

    db.Session.bulk_save_objects(list_of_data)
    db.Session.commit()
    db.Session.close()
import modals
from sqlalchemy import and_
import datetime

'''
utilities to convert data from one database to another
'''

# Module-level database handle shared by the conversion helpers.
db = modals.CloudDB()


def convert_master_to_user(interface_schematic):
    """Build a ``modals.UserInterface`` row from a master-db record dict.

    ``interface_schematic`` is expected to be a mapping with plural crime
    keys ("assaults", "murders", ...) plus "latitude"/"longitude"; each is
    copied onto the singular column name of the user-interface table.
    Raises KeyError if any expected key is missing.
    """
    # first iteration of converting master_db entries to user_interface entries
    # create the sqlAlchemy object to be inserted into the new table
    # NOTE: keys are plural in the source record, singular on the target
    # columns (e.g. "rapes" -> sexual_assault, "gta" -> gta).
    usr_schema = modals.UserInterface(
        latitude=interface_schematic["latitude"],
        longitude=interface_schematic["longitude"],
        assault=interface_schematic["assaults"],
        murder=interface_schematic["murders"],
        theft=interface_schematic["thefts"],
        sexual_assault=interface_schematic["rapes"],
        gta=interface_schematic["gta"],
        robbery=interface_schematic["robberies"],
        other=interface_schematic["other"])
    return usr_schema


def add_to_user_interface(master_list, Session):
    """Convert each entry of ``master_list`` and bulk-save the results.

    (Function continues beyond this chunk of the file.)
    """
    # Iterates through a list of entries and then saves them in a database all at once.
    # Much faster than individual commits
    bulk_entries = []
    for entry in master_list:
        usr_entry = convert_master_to_user(entry)