def transit_eta(tables, static_feed, trip_update_feed, alert_feed,
                vehicle_position_feed, arg_agency_id, trip2pattern):
    columns = ['agency_id', 'RecordedDate', 'RecordedTime', 'veh_id', 'veh_lat',
               'veh_lon', 'veh_speed', 'veh_location_time', 'route_short_name',
               'route_dir', 'day', 'run', 'pattern_id', 'stop_id', 'seq', 'ETA']
    try:
        login = {'host': "localhost", 'user': SQL_USER, 'passwd': SQL_PWD,
                 'db': "PATHTransit"}
        run_pattern_df = helper.sql2df('RunPattern', login)
    except Exception as e:
        print e

    tables['TransitETA'] = pd.DataFrame()
    for entity in trip_update_feed.entity:
        new_row = {}
        new_row['agency_id'] = arg_agency_id
        new_row['RecordedDate'] = str(datetime.datetime.now().strftime('%Y-%m-%d'))
        new_row['RecordedTime'] = str(datetime.datetime.now().strftime('%H:%M:%S'))
        # vehicle fields are read off the feed entity
        new_row['veh_id'] = entity.id
        new_row['veh_lat'] = entity.vehicle.position.latitude
        new_row['veh_lon'] = entity.vehicle.position.longitude
        new_row['veh_speed'] = entity.vehicle.position.speed
        # VTA and BART are in the Bay Area so time is the same
        new_row['veh_location_time'] = str(datetime.datetime.now().strftime('%H:%M:%S'))
        # new_row['route_short_name'] = runPattern_entry['route_short_name']
        # new_row['day'] = runPattern_entry['day']
        # new_row['run'] = runPattern_entry['run']
        # new_row['pattern_id'] = runPattern_entry['pattern_id']
        # new_row['route_dir'] = runPattern_entry['route_dir']
        new_row['stop_id'] = entity.vehicle.stop_id
        new_row['seq'] = entity.vehicle.current_stop_sequence
        tables["TransitETA"] = tables["TransitETA"].append(pd.Series(new_row),
                                                           ignore_index=True)
    helper.write_table(tables, "TransitETA")
    print "SUCCESS with TransitETA"
def transit_eta(tables, static_feed, trip_update_feed, alert_feed,
                vehicle_position_feed, agency_id, trip2pattern):
    columns = ['agency_id', 'RecordedDate', 'RecordedTime', 'veh_id', 'veh_lat',
               'veh_lon', 'veh_speed', 'veh_location_time', 'route_short_name',
               'route_dir', 'day', 'run', 'pattern_id', 'stop_id', 'seq', 'ETA']
    try:
        login = {'host': "localhost", 'user': "******", 'passwd': "root",
                 'db': "TrafficTransit"}
        run_pattern_df = helper.sql2df('RunPattern', login)
    except Exception as e:
        print e

    tables['TransitETA'] = pd.DataFrame()
    for entity in trip_update_feed.entity:
        new_row = {}
        new_row['agency_id'] = agency_id
        new_row['RecordedDate'] = str(datetime.datetime.now().strftime('%Y-%m-%d'))
        new_row['RecordedTime'] = str(datetime.datetime.now().strftime('%H:%M:%S'))
        # vehicle fields are read off the feed entity
        new_row['veh_id'] = entity.id
        new_row['veh_lat'] = entity.vehicle.position.latitude
        new_row['veh_lon'] = entity.vehicle.position.longitude
        new_row['veh_speed'] = entity.vehicle.position.speed
        # VTA and BART are in the Bay Area so time is the same
        new_row['veh_location_time'] = str(datetime.datetime.now().strftime('%H:%M:%S'))
        # new_row['route_short_name'] = runPattern_entry['route_short_name']
        # new_row['day'] = runPattern_entry['day']
        # new_row['run'] = runPattern_entry['run']
        # new_row['pattern_id'] = runPattern_entry['pattern_id']
        # new_row['route_dir'] = runPattern_entry['route_dir']
        new_row['stop_id'] = entity.vehicle.stop_id
        new_row['seq'] = entity.vehicle.current_stop_sequence
        tables["TransitETA"] = tables["TransitETA"].append(pd.Series(new_row),
                                                           ignore_index=True)
    helper.write_table(tables, "TransitETA")
    print "SUCCESS with TransitETA"
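# A minimal sketch (an assumption, not part of the original pipeline) of how the
# trip_update_feed / vehicle_position_feed arguments used above might be obtained
# with the gtfs-realtime protobuf bindings; the feed URL is a placeholder, and the
# project's real fetch code may live elsewhere with a different interface.
def fetch_realtime_feed(feed_url):
    import urllib2
    from google.transit import gtfs_realtime_pb2
    feed = gtfs_realtime_pb2.FeedMessage()
    # parse the raw protobuf payload into a FeedMessage with .entity records
    feed.ParseFromString(urllib2.urlopen(feed_url).read())
    return feed
# example: trip_update_feed = fetch_realtime_feed("http://example.com/gtfs-rt/TripUpdates")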
def transfers(tables, static_feed, arg_agency_id):
    tables["Transfers"] = pd.DataFrame()
    columns = ['from_agency_id', 'from_id', 'to_agency_id', 'to_id',
               'transfer_type', 'min_transfer_time', 'transfer_dist']
    max_distance = 0.5  # in miles

    # initiate googlemaps for finding minimum transfer time and transfer distance
    gmaps = googlemaps.Client(key='AIzaSyB_yzsaBUOOo3ukoeDvtjg5Q32IGSkBUvU')

    try:
        # What if shapes_df also had the trips it is a part of?
        # load Route_stop_seq for miscellaneous lookups
        login = {'host': "localhost", 'user': SQL_USER, 'passwd': SQL_PWD,
                 'db': "PATHTransit"}
        route_stop_seq_df = helper.sql2df('Route_stop_seq', login)
    except Exception as e:
        print os.getcwd()
        print e

    counter = 0
    using_google_maps = False
    # loop through every pair of distinct patterns in Route_stop_seq
    unique_patterns = route_stop_seq_df['pattern_id'].unique()
    for from_pattern in unique_patterns:
        # get stop_id's associated with the from_pattern
        from_pattern_stops = route_stop_seq_df[
            route_stop_seq_df['pattern_id'] == from_pattern]['stop_id']
        for to_pattern in unique_patterns:
            if from_pattern != to_pattern:
                # get stop_id's associated with the to_pattern
                to_pattern_stops = route_stop_seq_df[
                    route_stop_seq_df['pattern_id'] == to_pattern]['stop_id']
                # now we have two lists of stops from different patterns;
                # iterate through every pair of stops
                for from_pattern_stop in from_pattern_stops:
                    for to_pattern_stop in to_pattern_stops:
                        if arg_agency_id == 11:
                            # TriDelta: coerce the stop_id's to int
                            from_stop_coord = static_feed['stops'][
                                static_feed['stops']['stop_id'] == int(from_pattern_stop)].iloc[0]
                            to_stop_coord = static_feed['stops'][
                                static_feed['stops']['stop_id'] == int(to_pattern_stop)].iloc[0]
                        elif arg_agency_id == 8:
                            # BART: leave the stop_id's as strings
                            from_stop_coord = static_feed['stops'][
                                static_feed['stops']['stop_id'] == from_pattern_stop].iloc[0]
                            to_stop_coord = static_feed['stops'][
                                static_feed['stops']['stop_id'] == to_pattern_stop].iloc[0]
                        else:
                            from_stop_coord = static_feed['stops'][
                                static_feed['stops']['stop_id'] == from_pattern_stop].iloc[0]
                            to_stop_coord = static_feed['stops'][
                                static_feed['stops']['stop_id'] == to_pattern_stop].iloc[0]

                        if using_google_maps:
                            distance_time_result = helper.google_walking_distance_time(
                                from_stop_coord['stop_lat'], from_stop_coord['stop_lon'],
                                to_stop_coord['stop_lat'], to_stop_coord['stop_lon'])
                        else:
                            distance_time_result = {}
                            distance_time_result['distance'] = helper.coordToMiles(
                                from_stop_coord['stop_lat'], from_stop_coord['stop_lon'],
                                to_stop_coord['stop_lat'], to_stop_coord['stop_lon'])
                            distance_time_result['duration'] = "Time Estimation N/A"

                        if distance_time_result['distance'] < max_distance:
                            # the distance is within bounds, so add a new row
                            print distance_time_result
                            counter += 1
                            print "counter: " + str(counter)
                            new_row = {}
                            new_row['from_agency_id'] = arg_agency_id
                            new_row['from_id'] = from_pattern_stop
                            new_row['to_agency_id'] = 1
                            new_row['to_id'] = to_pattern_stop
                            new_row['transfer_type'] = 0
                            new_row['min_transfer_time'] = distance_time_result['duration']
                            new_row['transfer_dist'] = distance_time_result['distance']
                            tables["Transfers"] = tables["Transfers"].append(
                                pd.Series(new_row), ignore_index=True)
    helper.write_table(tables, "Transfers")
    print "SUCCESS with Transfers"
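# A minimal haversine sketch of what helper.coordToMiles is assumed to compute
# (great-circle distance in miles between two lat/lon pairs); the project's own
# helper may use a different name, argument order, or method.
def coord_to_miles_sketch(lat1, lon1, lat2, lon2):
    import math
    earth_radius_miles = 3959.0
    phi1, phi2 = math.radians(lat1), math.radians(lat2)
    dphi = math.radians(lat2 - lat1)
    dlam = math.radians(lon2 - lon1)
    a = math.sin(dphi / 2) ** 2 + math.cos(phi1) * math.cos(phi2) * math.sin(dlam / 2) ** 2
    return 2 * earth_radius_miles * math.asin(math.sqrt(a))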
def route_point_seq(tables, static_feed, arg_agency_id, trip2pattern):
    try:
        # What if shapes_df also had the trips it is a part of?
        # load Route_stop_seq and the point lookup tables
        login = {'host': "localhost", 'user': SQL_USER, 'passwd': SQL_PWD,
                 'db': "PATHTransit"}
        route_stop_seq_df = helper.sql2df('Route_stop_seq', login)
        big_points_df = helper.sql2df('Big_Points', login)
        points_df = helper.sql2df('Points', login)
    except Exception as e:
        print os.getcwd()
        print e

    counter = 0
    tables['Route_Point_Seq2'] = pd.DataFrame()
    columns = ['agency_id', 'route_short_name', 'route_dir', 'pattern_id',
               'shape_id', 'point_id', 'seq', 'length', 'heading', 'dist', 'version']

    # find the list of unique trips in route_stop_seq_df and loop through them
    unique_trips = route_stop_seq_df['trip_id'].unique()
    for current_trip_id in unique_trips:
        print "current trip id: " + str(current_trip_id)
        # route information for this trip, from Route_stop_seq
        route_info_specific_to_trip = route_stop_seq_df[
            route_stop_seq_df['trip_id'] == current_trip_id].iloc[0]
        # use the current trip_id to find the shape_id associated with the trip
        current_shape_id = static_feed['trips'][
            static_feed['trips']['trip_id'] == current_trip_id].iloc[0]['shape_id']
        print "Current_shape_id: " + str(current_shape_id)
        # with the shape_id, get the list of way points from shapes.txt
        waypoints_specific_to_shape_id = static_feed['shapes'][
            static_feed['shapes']['shape_id'] == current_shape_id]
        print "WayPoints: "
        print waypoints_specific_to_shape_id

        distanceSinceStart = 0  # distance since the start of the trip
        lastPoint = None
        for a, row in waypoints_specific_to_shape_id.iterrows():
            counter += 1
            print "counter: " + str(counter)
            new_row = {}
            # info from Route_stop_seq
            new_row['route_short_name'] = route_info_specific_to_trip['route_short_name']
            new_row['route_dir'] = route_info_specific_to_trip['route_dir']
            new_row['pattern_id'] = route_info_specific_to_trip['pattern_id']
            # general info
            new_row['trip_id'] = current_trip_id
            new_row['agency_id'] = arg_agency_id
            new_row['shape_id'] = current_shape_id
            # info from the current way point
            new_row['shape_pt_sequence'] = row['shape_pt_sequence']
            # getting the point_id is hard because the point has to be matched on
            # current_lat/current_lon, which is technically difficult
            currentLon = row['shape_pt_lon']
            currentLat = row['shape_pt_lat']
            if lastPoint is not None:
                # length is the distance from the previous way point
                new_row['length'] = helper.coordToMiles(
                    currentLat, currentLon,
                    lastPoint['current_lat'], lastPoint['current_lon'])
                distanceSinceStart += new_row['length']  # add the length to the cumulative distance
                new_row['dist'] = distanceSinceStart
                new_row['heading'] = helper.calculate_heading(
                    currentLat, currentLon,
                    lastPoint['current_lat'], lastPoint['current_lon'])
                lastPoint = {'current_lat': currentLat, 'current_lon': currentLon}
            else:
                # the case where the way point starts the trip
                new_row['length'] = 0  # seq i - seq i distance is 0
                new_row['dist'] = 0
                lastPoint = {'current_lat': currentLat, 'current_lon': currentLon}
                new_row['heading'] = "N/A"  # the first point doesn't have a heading

            # look up the point_id based on the integer-scaled pt_lat, pt_lon
            big_constant = pow(10, 8)
            currentBigLon = int(currentLon * big_constant)
            currentBigLat = int(currentLat * big_constant)
            point_id = big_points_df[
                (big_points_df['point_lat'] == currentBigLat) &
                (big_points_df['point_lon'] == currentBigLon)].iloc[0]['point_id']
            new_row['point_id'] = point_id
            tables['Route_Point_Seq2'] = tables['Route_Point_Seq2'].append(
                pd.Series(new_row), ignore_index=True)
    print tables['Route_Point_Seq2']
    helper.write_table(tables, "Route_Point_Seq2")
    print "SUCCESS with Route Point Seq 2"
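# A sketch of the initial-bearing formula that helper.calculate_heading is assumed
# to implement (degrees clockwise from north, from point 1 toward point 2); the
# real helper's argument order and conventions may differ.
def calculate_heading_sketch(lat1, lon1, lat2, lon2):
    import math
    phi1, phi2 = math.radians(lat1), math.radians(lat2)
    dlam = math.radians(lon2 - lon1)
    y = math.sin(dlam) * math.cos(phi2)
    x = math.cos(phi1) * math.sin(phi2) - math.sin(phi1) * math.cos(phi2) * math.cos(dlam)
    return (math.degrees(math.atan2(y, x)) + 360.0) % 360.0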
def schedules(tables, static_feed, arg_agency_id, trip2pattern):
    try:
        login = {'host': "localhost", 'user': SQL_USER, 'passwd': SQL_PWD,
                 'db': "PATHTransit"}
        run_pattern_df = helper.sql2df('RunPattern', login)
        agency_name = static_feed['agency'].iloc[0]['agency_name']
        trip2pattern = helper.csv2df("Trip2Pattern.csv")  # load the trip2pattern csv
        route_stop_seq_df = helper.sql2df('Route_stop_seq', login)
    except Exception as e:
        print e

    columns = ['agency_id', 'route_short_name', 'start_date', 'end_date', 'day',
               'route_dir', 'run', 'pattern_id', 'seq', 'stop_id', 'is_time_point',
               'pickup_type', 'drop_off_type', 'arrival_time', 'departure_time',
               'stop_headsign', 'trip_id']
    tables["Schedules"] = pd.DataFrame()
    pattern_id_memoization = {}  # to save calculations and memory
    counter = 0

    # use each trip_id to find the pattern_id specific to that trip
    for a, row in static_feed['stop_times'].iterrows():
        print counter
        counter += 1
        # find the pattern_id (What is the point of pattern_id? we already have the stops.)
        pattern_id = trip2pattern[trip2pattern['trip_id'] == row['trip_id']]['pattern_id']
        new_row = {}
        # basic agency info
        new_row['agency_id'] = arg_agency_id

        # pattern information, memoized per trip_id
        current_trip_id = row['trip_id']
        if current_trip_id in pattern_id_memoization:
            new_row['pattern_id'] = pattern_id_memoization[current_trip_id]
            print current_trip_id
        else:
            # access Trip2Pattern.csv to match the trip_id with the pattern
            pattern_id = trip2pattern[
                trip2pattern['trip_id'] == current_trip_id].iloc[0]['pattern_id']
            pattern_id_memoization[current_trip_id] = pattern_id
            new_row['pattern_id'] = pattern_id_memoization[current_trip_id]
            print "current trip id: " + str(current_trip_id)
            # print route_stop_seq_df

        # get information from Route_stop_seq by matching on trip_id
        route_stop_seq_stop_time_specific = route_stop_seq_df[
            route_stop_seq_df['trip_id'] == current_trip_id]
        if not route_stop_seq_stop_time_specific.empty:
            route_stop_seq_stop_time_specific = route_stop_seq_stop_time_specific.iloc[0]
            new_row['route_short_name'] = route_stop_seq_stop_time_specific['route_short_name']
            new_row['route_dir'] = route_stop_seq_stop_time_specific['route_dir']

        # get information from RunPattern by matching on trip_id
        run_pattern_stop_time_specific = run_pattern_df[
            run_pattern_df['trip_id'] == current_trip_id]
        if not run_pattern_stop_time_specific.empty:
            run_pattern_stop_time_specific = run_pattern_stop_time_specific.iloc[0]
            new_row['start_date'] = run_pattern_stop_time_specific['start_date']
            new_row['end_date'] = run_pattern_stop_time_specific['end_date']
            new_row['day'] = run_pattern_stop_time_specific['day']
            new_row['run'] = run_pattern_stop_time_specific['run']

        # all information from stop_times.txt; time_point, pickup_type,
        # drop_off_type and stop_headsign are optional GTFS fields
        new_row['seq'] = row['stop_sequence']
        new_row['stop_id'] = row['stop_id']
        new_row['is_time_point'] = helper.optional_field(a, 'time_point', static_feed['stop_times'])
        new_row['pickup_type'] = helper.optional_field(a, 'pickup_type', static_feed['stop_times'])
        new_row['drop_off_type'] = helper.optional_field(a, 'drop_off_type', static_feed['stop_times'])
        new_row['arrival_time'] = row['arrival_time']
        new_row['departure_time'] = row['departure_time']
        new_row['stop_headsign'] = helper.optional_field(a, 'stop_headsign', static_feed['stop_times'])
        new_row['trip_id'] = row['trip_id']
        tables["Schedules"] = tables["Schedules"].append(pd.Series(new_row),
                                                         ignore_index=True)
    helper.write_table(tables, "Schedules")
    print "Success with Schedules"
def transfers(tables, static_feed, trip_update_feed, alert_feed,
              vehicle_position_feed, agency_id, trip2pattern):
    tables["Transfers"] = pd.DataFrame()
    columns = ['from_agency_id', 'from_id', 'to_agency_id', 'to_id',
               'transfer_type', 'min_transfer_time', 'transfer_dist']
    max_distance = 3  # in miles

    # initiate googlemaps for finding minimum transfer time and transfer distance
    gmaps = googlemaps.Client(key='AIzaSyB_yzsaBUOOo3ukoeDvtjg5Q32IGSkBUvU')

    try:
        # What if shapes_df also had the trips it is a part of?
        # load Route_stop_seq for miscellaneous lookups
        login = {'host': "localhost", 'user': "******", 'passwd': "root",
                 'db': "TrafficTransit"}
        route_stop_seq_df = helper.sql2df('Route_stop_seq', login)
    except Exception as e:
        print os.getcwd()
        print e

    counter = 0
    using_google_maps = False
    # loop through every pair of distinct patterns in Route_stop_seq
    unique_patterns = route_stop_seq_df['pattern_id'].unique()
    for from_pattern in unique_patterns:
        # get stop_id's associated with the from_pattern
        from_pattern_stops = route_stop_seq_df[
            route_stop_seq_df['pattern_id'] == from_pattern]['stop_id']
        for to_pattern in unique_patterns:
            if from_pattern != to_pattern:
                # get stop_id's associated with the to_pattern
                to_pattern_stops = route_stop_seq_df[
                    route_stop_seq_df['pattern_id'] == to_pattern]['stop_id']
                # now we have two lists of stops from different patterns;
                # iterate through every pair of stops
                for from_pattern_stop in from_pattern_stops:
                    for to_pattern_stop in to_pattern_stops:
                        if agency_id == 11:
                            # TriDelta: coerce the stop_id's to int
                            from_stop_coord = static_feed['stops'][
                                static_feed['stops']['stop_id'] == int(from_pattern_stop)].iloc[0]
                            to_stop_coord = static_feed['stops'][
                                static_feed['stops']['stop_id'] == int(to_pattern_stop)].iloc[0]
                        elif agency_id == 8:
                            # BART: leave the stop_id's as strings
                            from_stop_coord = static_feed['stops'][
                                static_feed['stops']['stop_id'] == from_pattern_stop].iloc[0]
                            to_stop_coord = static_feed['stops'][
                                static_feed['stops']['stop_id'] == to_pattern_stop].iloc[0]
                        else:
                            from_stop_coord = static_feed['stops'][
                                static_feed['stops']['stop_id'] == from_pattern_stop].iloc[0]
                            to_stop_coord = static_feed['stops'][
                                static_feed['stops']['stop_id'] == to_pattern_stop].iloc[0]

                        if using_google_maps:
                            distance_time_result = helper.google_walking_distance_time(
                                from_stop_coord['stop_lat'], from_stop_coord['stop_lon'],
                                to_stop_coord['stop_lat'], to_stop_coord['stop_lon'])
                        else:
                            distance_time_result = {}
                            distance_time_result['distance'] = helper.coordToMiles(
                                from_stop_coord['stop_lat'], from_stop_coord['stop_lon'],
                                to_stop_coord['stop_lat'], to_stop_coord['stop_lon'])
                            distance_time_result['duration'] = "Time Estimation N/A"

                        if distance_time_result['distance'] < max_distance:
                            # the distance is within bounds, so add a new row
                            print distance_time_result
                            counter += 1
                            print "counter: " + str(counter)
                            new_row = {}
                            new_row['from_agency_id'] = agency_id
                            new_row['from_id'] = from_pattern_stop
                            new_row['to_agency_id'] = 1
                            new_row['to_id'] = to_pattern_stop
                            new_row['transfer_type'] = 0
                            new_row['min_transfer_time'] = distance_time_result['duration']
                            new_row['transfer_dist'] = distance_time_result['distance']
                            tables["Transfers"] = tables["Transfers"].append(
                                pd.Series(new_row), ignore_index=True)
    helper.write_table(tables, "Transfers")
    print "SUCCESS with Transfers"
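# A sketch (assumption) of what helper.google_walking_distance_time might do with a
# googlemaps client like the one created above: a single Distance Matrix request in
# walking mode, converted to miles and seconds. The real helper's signature and
# return format may differ.
def google_walking_distance_time_sketch(gmaps_client, from_lat, from_lon, to_lat, to_lon):
    result = gmaps_client.distance_matrix(origins=[(from_lat, from_lon)],
                                          destinations=[(to_lat, to_lon)],
                                          mode="walking")
    element = result['rows'][0]['elements'][0]
    return {'distance': element['distance']['value'] / 1609.34,  # meters -> miles
            'duration': element['duration']['value']}            # seconds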
def route_point_seq(tables, static_feed, trip_update_feed, alert_feed,
                    vehicle_position_feed, agency_id, trip2pattern):
    try:
        # What if shapes_df also had the trips it is a part of?
        # load Route_stop_seq and the point lookup tables
        login = {'host': "localhost", 'user': "******", 'passwd': "root",
                 'db': "TrafficTransit"}
        route_stop_seq_df = helper.sql2df('Route_stop_seq', login)
        big_points_df = helper.sql2df('Big_Points', login)
        points_df = helper.sql2df('Points', login)
    except Exception as e:
        print os.getcwd()
        print e

    counter = 0
    tables['Route_Point_Seq2'] = pd.DataFrame()
    columns = ['agency_id', 'route_short_name', 'route_dir', 'pattern_id',
               'shape_id', 'point_id', 'seq', 'length', 'heading', 'dist', 'version']

    # find the list of unique trips in route_stop_seq_df and loop through them
    unique_trips = route_stop_seq_df['trip_id'].unique()
    for current_trip_id in unique_trips:
        print "current trip id: " + str(current_trip_id)
        # route information for this trip, from Route_stop_seq
        route_info_specific_to_trip = route_stop_seq_df[
            route_stop_seq_df['trip_id'] == current_trip_id].iloc[0]
        # use the current trip_id to find the shape_id associated with the trip
        current_shape_id = static_feed['trips'][
            static_feed['trips']['trip_id'] == current_trip_id].iloc[0]['shape_id']
        print "Current_shape_id: " + str(current_shape_id)
        # with the shape_id, get the list of way points from shapes.txt
        waypoints_specific_to_shape_id = static_feed['shapes'][
            static_feed['shapes']['shape_id'] == current_shape_id]
        print "WayPoints: "
        print waypoints_specific_to_shape_id

        distanceSinceStart = 0  # distance since the start of the trip
        lastPoint = None
        for a, row in waypoints_specific_to_shape_id.iterrows():
            counter += 1
            print "counter: " + str(counter)
            new_row = {}
            # info from Route_stop_seq
            new_row['route_short_name'] = route_info_specific_to_trip['route_short_name']
            new_row['route_dir'] = route_info_specific_to_trip['route_dir']
            new_row['pattern_id'] = route_info_specific_to_trip['pattern_id']
            # general info
            new_row['trip_id'] = current_trip_id
            new_row['agency_id'] = agency_id
            new_row['shape_id'] = current_shape_id
            # info from the current way point
            new_row['shape_pt_sequence'] = row['shape_pt_sequence']
            # getting the point_id is hard because the point has to be matched on
            # current_lat/current_lon, which is technically difficult
            currentLon = row['shape_pt_lon']
            currentLat = row['shape_pt_lat']
            if lastPoint is not None:
                # length is the distance from the previous way point
                new_row['length'] = helper.coordToMiles(
                    currentLat, currentLon,
                    lastPoint['current_lat'], lastPoint['current_lon'])
                distanceSinceStart += new_row['length']  # add the length to the cumulative distance
                new_row['dist'] = distanceSinceStart
                new_row['heading'] = helper.calculate_heading(
                    currentLat, currentLon,
                    lastPoint['current_lat'], lastPoint['current_lon'])
                lastPoint = {'current_lat': currentLat, 'current_lon': currentLon}
            else:
                # the case where the way point starts the trip
                new_row['length'] = 0  # seq i - seq i distance is 0
                new_row['dist'] = 0
                lastPoint = {'current_lat': currentLat, 'current_lon': currentLon}
                new_row['heading'] = "N/A"  # the first point doesn't have a heading

            # look up the point_id based on the integer-scaled pt_lat, pt_lon
            big_constant = pow(10, 8)
            currentBigLon = int(currentLon * big_constant)
            currentBigLat = int(currentLat * big_constant)
            point_id = big_points_df[
                (big_points_df['point_lat'] == currentBigLat) &
                (big_points_df['point_lon'] == currentBigLon)].iloc[0]['point_id']
            new_row['point_id'] = point_id
            tables['Route_Point_Seq2'] = tables['Route_Point_Seq2'].append(
                pd.Series(new_row), ignore_index=True)
    print tables['Route_Point_Seq2']
    helper.write_table(tables, "Route_Point_Seq2")
    print "SUCCESS with Route Point Seq 2"
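# An optional optimization sketch (not part of the original code): index Big_Points
# once on the integer-scaled (lat, lon) pair used above, so the point_id lookup inside
# the way-point loop becomes a single indexed access instead of a full-table scan.
def build_point_index(big_points_df):
    # Series of point_id keyed by a (point_lat, point_lon) MultiIndex
    return big_points_df.set_index(['point_lat', 'point_lon'])['point_id']
# usage inside the loop: point_id = point_index.loc[(currentBigLat, currentBigLon)]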
def schedules(tables, static_feed, trip_update_feed, alert_feed,
              vehicle_position_feed, agency_id, trip2pattern):
    try:
        login = {'host': "localhost", 'user': "******", 'passwd': "root",
                 'db': "TrafficTransit"}
        run_pattern_df = helper.sql2df('RunPattern', login)
        agency_name = static_feed['agency'].iloc[0]['agency_name']
        trip2pattern = helper.csv2df("Trip2Pattern.csv")  # load the trip2pattern csv
        route_stop_seq_df = helper.sql2df('route_stop_seq', login)
    except Exception as e:
        print e

    columns = ['agency_id', 'route_short_name', 'start_date', 'end_date', 'day',
               'route_dir', 'run', 'pattern_id', 'seq', 'stop_id', 'is_time_point',
               'pickup_type', 'drop_off_type', 'arrival_time', 'departure_time',
               'stop_headsign', 'trip_id']
    tables["Schedules"] = pd.DataFrame()
    pattern_id_memoization = {}  # to save calculations and memory
    counter = 0

    # use each trip_id to find the pattern_id specific to that trip
    for a, row in static_feed['stop_times'].iterrows():
        print counter
        counter += 1
        # find the pattern_id (What is the point of pattern_id? we already have the stops.)
        pattern_id = trip2pattern[trip2pattern['trip_id'] == row['trip_id']]['pattern_id']
        new_row = {}
        # basic agency info
        new_row['agency_id'] = agency_id

        # pattern information, memoized per trip_id
        current_trip_id = row['trip_id']
        if current_trip_id in pattern_id_memoization:
            new_row['pattern_id'] = pattern_id_memoization[current_trip_id]
            print current_trip_id
        else:
            # access Trip2Pattern.csv to match the trip_id with the pattern
            pattern_id = trip2pattern[
                trip2pattern['trip_id'] == current_trip_id].iloc[0]['pattern_id']
            pattern_id_memoization[current_trip_id] = pattern_id
            new_row['pattern_id'] = pattern_id_memoization[current_trip_id]
            print "current trip id: " + str(current_trip_id)
            # print route_stop_seq_df

        # get information from route_stop_seq by matching on trip_id
        route_stop_seq_stop_time_specific = route_stop_seq_df[
            route_stop_seq_df['trip_id'] == current_trip_id]
        if not route_stop_seq_stop_time_specific.empty:
            route_stop_seq_stop_time_specific = route_stop_seq_stop_time_specific.iloc[0]
            new_row['route_short_name'] = route_stop_seq_stop_time_specific['route_short_name']
            new_row['route_dir'] = route_stop_seq_stop_time_specific['route_dir']

        # get information from RunPattern by matching on trip_id
        run_pattern_stop_time_specific = run_pattern_df[
            run_pattern_df['trip_id'] == current_trip_id]
        if not run_pattern_stop_time_specific.empty:
            run_pattern_stop_time_specific = run_pattern_stop_time_specific.iloc[0]
            new_row['start_date'] = run_pattern_stop_time_specific['start_date']
            new_row['end_date'] = run_pattern_stop_time_specific['end_date']
            new_row['day'] = run_pattern_stop_time_specific['day']
            new_row['run'] = run_pattern_stop_time_specific['run']

        # all information from stop_times.txt; time_point, pickup_type,
        # drop_off_type and stop_headsign are optional GTFS fields
        new_row['seq'] = row['stop_sequence']
        new_row['stop_id'] = row['stop_id']
        new_row['is_time_point'] = helper.optional_field(a, 'time_point', static_feed['stop_times'])
        new_row['pickup_type'] = helper.optional_field(a, 'pickup_type', static_feed['stop_times'])
        new_row['drop_off_type'] = helper.optional_field(a, 'drop_off_type', static_feed['stop_times'])
        new_row['arrival_time'] = row['arrival_time']
        new_row['departure_time'] = row['departure_time']
        new_row['stop_headsign'] = helper.optional_field(a, 'stop_headsign', static_feed['stop_times'])
        new_row['trip_id'] = row['trip_id']
        tables["Schedules"] = tables["Schedules"].append(pd.Series(new_row),
                                                         ignore_index=True)
    helper.write_table(tables, "Schedules")
    print "Success with Schedules"
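# A hedged usage sketch of how these table builders might be driven. The directory,
# file names, and agency_id below are illustrative assumptions (8 is labelled BART in
# transfers above); the module-level pandas and helper imports are relied on here.
if __name__ == '__main__':
    import os
    gtfs_dir = "gtfs_static"  # placeholder folder holding the agency's GTFS .txt files
    static_feed = {name: pd.read_csv(os.path.join(gtfs_dir, name + ".txt"))
                   for name in ['agency', 'stops', 'trips', 'stop_times', 'shapes']}
    trip2pattern = helper.csv2df("Trip2Pattern.csv")
    tables = {}
    # these three builders never read the realtime feed arguments, so None is passed;
    # transit_eta additionally needs a parsed trip update feed (see the
    # fetch_realtime_feed sketch above)
    schedules(tables, static_feed, None, None, None, 8, trip2pattern)
    transfers(tables, static_feed, None, None, None, 8, trip2pattern)
    route_point_seq(tables, static_feed, None, None, None, 8, trip2pattern)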