import os

import pandas as pd
from fastapi import HTTPException, Request

# Assumed imports: cf (helper functions), dbconnect (DB access) and the request
# payload models (addTrip_payload, addStops_payload, editPattern_payload) are
# project-local modules / classes brought in elsewhere in this file.


async def addTrip(req: addTrip_payload):
    cf.logmessage("addTrip api call")
    space_id = int(os.environ.get('SPACE_ID', 1))
    pattern_id = req.pattern_id
    start_time = req.start_time
    trip_id = cf.makeUID()

    i1 = f"""insert into trips (space_id, id, pattern_id, start_time, name) values
    ({space_id}, '{trip_id}', '{pattern_id}', '{start_time}', '{trip_id}_{start_time}')
    """

    s1 = f"""select stop_sequence, stop_id from pattern_stops
    where space_id = {space_id}
    and pattern_id = '{pattern_id}'
    order by stop_sequence
    """
    df1 = dbconnect.makeQuery(s1, output='df')

    df2 = df1[['stop_sequence']].copy()
    df2['space_id'] = space_id
    df2['trip_id'] = trip_id
    df2['id'] = cf.assignUID(df1, length=7)
    df2['arrival_time'] = None
    df2.at[0, 'arrival_time'] = start_time
    # to do: populate remaining arrival times also, taking a default speed
    # and calculating lat-long distance / routed distance

    status1 = dbconnect.execSQL(i1)
    status2 = dbconnect.addTable(df2, 'stop_times')

    returnD = {"message": "success"}
    returnD['trip_id'] = trip_id
    returnD['added_stop_times'] = len(df2)
    return returnD
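
# --- Illustrative only: the addTrip_payload model is not shown in this file. ---
# Judging by the fields the handler reads (req.pattern_id, req.start_time), it is
# presumably a pydantic model roughly like the sketch below. Field types and the
# time format are assumptions; in the project the class would be defined or
# imported before the route handlers that reference it.
from pydantic import BaseModel


class addTrip_payload(BaseModel):
    pattern_id: str
    start_time: str  # e.g. "08:30:00" -- exact format is an assumption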
def addStops(req: addStops_payload):
    """ Add stops """
    cf.logmessage("addStops api call")

    # convert request body to json array, from https://stackoverflow.com/a/60845064/4355695
    requestArr = [t.__dict__ for t in req.data]
    # print(requestArr)
    df1 = pd.DataFrame(requestArr)

    # to do: validation: remove the bad ones

    # remove duplicates
    df1 = df1.drop_duplicates('name').copy()
    df1['space_id'] = int(os.environ.get('SPACE_ID', 1))
    df1['id'] = cf.assignUID(df1)
    timestamp = cf.getTime()
    df1['created_on'] = timestamp
    df1['created_by'] = ''  # will bring in username later

    not_added = []
    added = []
    for row in df1.to_dict(orient='records'):
        if not row.get('name'):
            cf.logmessage("No name:", row)
            continue
        icols = ['space_id', 'id', 'name', 'created_on', 'created_by', 'zap']
        ivals = [
            f"{row['space_id']}", f"'{row['id']}'", f"'{row['name']}'",
            "CURRENT_TIMESTAMP", f"'{row['created_by']}'",
            f"'{cf.zapper(row['name'])}'"
        ]
        if row.get('latitude'):
            icols.append('latitude')
            ivals.append(f"{row['latitude']}")
        if row.get('longitude'):
            icols.append('longitude')
            ivals.append(f"{row['longitude']}")
        if row.get('description'):
            icols.append('description')
            ivals.append(f"'{row['description']}'")
        if row.get('group_id'):
            icols.append('group_id')
            ivals.append(f"'{row['group_id']}'")

        i1 = f"""insert into stops_master ({','.join(icols)})
        values ({','.join(ivals)})"""
        iCount = dbconnect.execSQL(i1)
        if not iCount:
            not_added.append(row)
        else:
            added.append(row)

    returnD = {
        'message': "success",
        "num_added": 0,
        "num_not_added": 0,
        "added": [],
        "not_added": []
    }
    if len(added):
        returnD['num_added'] = len(added)
        returnD['added'] = [{
            "stop_id": x['id'],
            "name": x['name']
        } for x in added]
    if len(not_added):
        returnD['num_not_added'] = len(not_added)
        returnD['not_added'] = [x['id'] for x in not_added]
    return returnD
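
# --- Illustrative only: a sketch of the addStops_payload model assumed above. ---
# Field names come from the row.get(...) calls in addStops(); optionality and
# types are guesses, and "StopItem" is a hypothetical name. In the project these
# classes would be defined or imported before the handlers.
from typing import List, Optional

from pydantic import BaseModel


class StopItem(BaseModel):
    name: str
    latitude: Optional[float] = None
    longitude: Optional[float] = None
    description: Optional[str] = None
    group_id: Optional[str] = None


class addStops_payload(BaseModel):
    data: List[StopItem]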
def editPattern(req: editPattern_payload):
    cf.logmessage("editPattern api call")
    space_id = int(os.environ.get('SPACE_ID', 1))

    # find if existing
    s1 = f"""select * from patterns
    where space_id = {space_id}
    and id='{req.pattern_id}'
    """
    existingPattern = dbconnect.makeQuery(s1, output='oneJson')
    if not len(existingPattern):
        raise HTTPException(status_code=400, detail="Pattern not found")

    s2 = f"""select * from pattern_stops
    where space_id = {space_id}
    and pattern_id='{req.pattern_id}'
    order by stop_sequence
    """
    existingPatternStops = dbconnect.makeQuery(s2, output='df')

    # to do: validation of stop ids

    # delete existing pattern stops if any
    if len(existingPatternStops):
        print("existing:")
        print(existingPatternStops)
        d1 = f"""delete from pattern_stops
        where pattern_id='{req.pattern_id}'
        """
        dCount = dbconnect.execSQL(d1)
        if not dCount:
            raise HTTPException(status_code=400,
                                detail="Could not remove existing sequence")
    else:
        cf.logmessage("This pattern didn't have stops earlier.")

    # new pattern
    df = pd.DataFrame({'stop_id': req.stops})
    df['id'] = cf.assignUID(df)
    df['stop_sequence'] = list(range(1, len(df) + 1))
    print("new:")
    print(df)
    df['space_id'] = space_id
    df['pattern_id'] = req.pattern_id

    status1 = dbconnect.addTable(df, table='pattern_stops')
    if not status1:
        raise HTTPException(status_code=400, detail="Could not add sequence")

    # also update pattern's entry
    u1 = f"""update patterns
    set last_updated=CURRENT_TIMESTAMP
    where id='{req.pattern_id}'
    """
    uCount = dbconnect.execSQL(u1)
    if not uCount:
        cf.logmessage(
            "Warning: could not update the pattern's entry in patterns table, continuing"
        )

    returnD = {
        "message": "success",
        "oldCount": len(existingPatternStops),
        "newCount": len(df)
    }

    # update timings entries if the length of the pattern has changed
    if len(existingPatternStops) != len(df):
        returnD['numTrips'], returnD['timings_added'], returnD[
            'timings_removed'] = updateTimingsForPattern(req.pattern_id, len(df))

    cf.logmessage(returnD)
    return returnD
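
# --- Illustrative only: a sketch of the editPattern_payload model assumed above. ---
# editPattern() reads req.pattern_id and req.stops (an ordered list of stop ids),
# so the model is presumably along these lines; field types are assumptions.
from typing import List

from pydantic import BaseModel


class editPattern_payload(BaseModel):
    pattern_id: str
    stops: List[str]  # stop ids in the order they should be sequenced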
async def saveTimings(req: Request):
    cf.logmessage("saveTimings api call")
    space_id = int(os.environ.get('SPACE_ID', 1))
    returnD = {'message': "success"}

    # making the api take a custom json array
    # from https://stackoverflow.com/a/64379772/4355695 (that and other answers)
    reqD = await req.json()
    # print(reqD)
    if (not len(reqD.get('data', []))) or (not isinstance(
            reqD.get('data', []), list)):
        raise HTTPException(status_code=400, detail="No data")

    df1 = pd.DataFrame(reqD['data']).fillna('')
    if 'stop_id' in df1.columns:
        del df1['stop_id']
    if 'name' in df1.columns:
        del df1['name']

    keepcols = ['stop_sequence']
    df2 = pd.melt(df1,
                  id_vars=keepcols,
                  var_name='trip_id',
                  value_name='arrival_time').sort_values(
                      ['trip_id', 'stop_sequence']).reset_index(drop=True)
    # df2.to_csv('stop_times.csv', index=False)
    df2['id'] = cf.assignUID(df2, length=7)
    df2['space_id'] = space_id

    # TO DO: time validation
    for N in range(len(df2)):
        if df2.at[N, 'arrival_time'] == '':
            df2.at[N, 'arrival_time'] = None

    tripsList = df2['trip_id'].unique().tolist()
    if not len(tripsList):
        raise HTTPException(status_code=400, detail="No tripIds in data")
    trip_idSQL = cf.quoteNcomma(tripsList)

    # fully delete existing stop_times for these trips and replace with new
    d1 = f"""delete from stop_times
    where space_id = {space_id}
    and trip_id in ({trip_idSQL})"""
    dCount1 = dbconnect.execSQL(d1)
    returnD['old_count'] = dCount1

    # df2.to_csv('sample.csv')
    iStatus1 = dbconnect.addTable(df2, 'stop_times')
    if not iStatus1:
        returnD['new_count'] = 0
        raise HTTPException(status_code=400,
                            detail="Failed to add stop_times data in DB")

    # update trips data
    # get all start times
    start_times_lookup = df2[df2['stop_sequence'] == 1][[
        'trip_id', 'arrival_time'
    ]].copy().set_index('trip_id').to_dict(orient='index')
    print(start_times_lookup)
    # to do later: get all end times also. Or, decide to drop that and don't bother.

    returnD['trips_updated'] = 0
    for trip_id in tripsList:
        uList = []
        uList.append("last_updated = CURRENT_TIMESTAMP")
        uList.append("modified_by = 'admin'")
        if start_times_lookup.get(trip_id, False):
            start_time = start_times_lookup[trip_id]['arrival_time']
            uList.append(f"start_time = '{start_time}'")
            uList.append(f"name = '{trip_id}_{start_time}'")
        else:
            uList.append("start_time = NULL")
            uList.append(f"name = '{trip_id}'")
        u1 = f"""update trips
        set {', '.join(uList)}
        where space_id = {space_id}
        and id = '{trip_id}'
        """
        uCount = dbconnect.execSQL(u1)
        returnD['trips_updated'] += uCount

    returnD['new_count'] = len(df2)
    return returnD
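
# --- Illustrative only: the JSON body shape saveTimings() expects, inferred from ---
# the pd.melt() call above: each row carries a stop_sequence, and every column
# other than stop_sequence / stop_id / name is treated as a trip id whose values
# are arrival times (blank means NULL). The ids and times below are made up.
example_saveTimings_body = {
    "data": [
        {"stop_sequence": 1, "stop_id": "stop01", "name": "First Stop",
         "trip01": "08:00:00", "trip02": "09:30:00"},
        {"stop_sequence": 2, "stop_id": "stop02", "name": "Second Stop",
         "trip01": "08:15:00", "trip02": ""},
    ]
}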
def updateTimingsForPattern(pattern_id, pattern_length):
    # to do: if a pattern's length has changed, then update timings entries for it
    space_id = int(os.environ.get('SPACE_ID', 1))
    totalAdded = totalRemoved = 0

    # find all trips for the pattern
    s1 = f"""select t1.id as trip_id, t2.id, t2.stop_sequence from trips as t1
    left join stop_times as t2
    on t1.id = t2.trip_id
    where t1.space_id = {space_id}
    and t1.pattern_id = '{pattern_id}'
    and t2.space_id = {space_id}
    order by t2.trip_id, t2.stop_sequence
    """
    df_exist_all = dbconnect.makeQuery(s1, output='df', keepCols=True)
    # tripsList = dbconnect.makeQuery(s1, output='column')
    if not len(df_exist_all):
        return 0, 0, 0

    tripsList = df_exist_all['trip_id'].unique().tolist()
    # if not len(tripsList):
    #     return len(tripsList), totalAdded, totalRemoved

    all_delIds = []
    all_df_new = []
    for trip_id in tripsList:
        # get existing
        cf.logmessage(f"trip_id: {trip_id}")
        df_exist = df_exist_all[df_exist_all['trip_id'] ==
                                trip_id].copy().reset_index(drop=True)
        # space_id = int(os.environ.get('SPACE_ID', 1))
        # s1 = f"""select id, stop_sequence from stop_times
        # where space_id = {space_id}
        # and trip_id = '{trip_id}'
        # order by stop_sequence
        # """
        # df_exist = dbconnect.makeQuery(s1, output='df', keepCols=True)

        if len(df_exist) == pattern_length:
            # no change needed!
            continue
        elif len(df_exist) > pattern_length:
            # delete flow
            delIds = df_exist[pattern_length:]['id'].tolist()
            if len(delIds):
                all_delIds += delIds
        else:
            # add flow
            newSeq = list(range(len(df_exist) + 1, pattern_length + 1))
            df_new = pd.DataFrame({'stop_sequence': newSeq})
            df_new['id'] = cf.assignUID(df_new, length=7)
            df_new['space_id'] = space_id
            df_new['trip_id'] = trip_id
            all_df_new.append(df_new)

    # delete at once
    if len(all_delIds):
        delIdsSQL = cf.quoteNcomma(all_delIds)
        cf.logmessage(f"ids to delete: {all_delIds}")
        d1 = f"""delete from stop_times
        where id in ({delIdsSQL})
        """
        totalRemoved = dbconnect.execSQL(d1)

    # add at once
    if len(all_df_new):
        add_df = pd.concat(all_df_new, sort=False, ignore_index=True)
        print("add_df:")
        print(add_df)
        totalAdded = dbconnect.addTable(add_df, 'stop_times')

    return len(tripsList), totalAdded, totalRemoved