def next_parkingspace_main(self, spaceId, radius): try: logger.info("Finding next parking space name for spaceid") #nps = NextParkingSpace() marker = self.getDBConnection("52.55.107.13", "cdp", "sysadmin", "sysadmin") query = "select sid, state, providerdetails, boundary, label, levellabel, ts from parking_space where sid = '" + spaceId + "'" marker.execute(query) #marker.execute("""select sid, state, providerdetails, boundary, label, levellabel, ts from parking_space""") data_db = marker.fetchall() data = pd.DataFrame(data_db, columns=[ 'sid', 'state', 'providerdetails', 'geo_pts', 'label', 'levellabel', 'ts' ]) data['geo_pts'] = data['geo_pts'].apply( lambda x: ast.literal_eval(x)) data['geo_pts'] = data['geo_pts'].apply( lambda x: self.conv_2_list(x)) #data=nps.get_geo_parking() data['mean_position'] = data['geo_pts'].apply( lambda x: [np.mean([i[0] for i in x]), np.mean([i[1] for i in x])]) #dist_stat=self.get_traffic_info(data) #road_inf=data['mean_position'].apply(lambda x: self.get_road_info(x,radius)) return self.get_road_info(data['mean_position'][0], radius) except Exception as e: logger.error( "Failed to find next parking space name. {}".format(e)) '''
def get_traffic_info(self, data, grouping_cols='sid'):
    """Build pairwise distance / travel-time / traffic-time matrices.

    Groups `data` by `grouping_cols`, then for every ordered pair of groups
    queries route info in both directions and fills three DataFrames indexed
    by group id. Returns {'dist', 'travel_time', 'traffic_time'} mapping to
    those frames, or None on failure (logged).
    """
    try:
        logger.info("Getting traffic info")
        # get traffic info travel time and distance
        sen_pos = data.groupby(grouping_cols).agg({'geo_pts': 'first'})
        idx = sen_pos.index
        dist_df = pd.DataFrame(index=idx, columns=idx)
        travel_time_df = pd.DataFrame(index=idx, columns=idx)
        traffic_time_df = pd.DataFrame(index=idx, columns=idx)
        for a, b in itertools.combinations(idx, 2):
            fwd = self.get_route_info(sen_pos.loc[a]['geo_pts'],
                                      sen_pos.loc[b]['geo_pts'])
            back = self.get_route_info(sen_pos.loc[b]['geo_pts'],
                                       sen_pos.loc[a]['geo_pts'])
            # travelling from a to b
            dist_df.loc[a, b] = fwd[0]
            travel_time_df.loc[a, b] = fwd[1]
            traffic_time_df.loc[a, b] = fwd[2]
            # travelling from b to a
            dist_df.loc[b, a] = back[0]
            travel_time_df.loc[b, a] = back[1]
            traffic_time_df.loc[b, a] = back[2]
        return {'dist': dist_df,
                'travel_time': travel_time_df,
                'traffic_time': traffic_time_df}
    except Exception as e:
        logger.error("Failed to get traffic info. {}".format(e))
def setup_routes(app):
    """Register every API route on the falcon `app` (WSGI).

    Creates the routes and binds them to the endpoint resources defined
    under /app/models.
    """
    try:
        app.add_route('/', RootResources())
        app.add_route('/{name}', RootNameResources())
        app.add_route('/parking/spacepredictions', ParkingPredictions())
        app.add_route('/parking/nextparkingspace', NextPSpace())
        app.add_route('/parking/surrounding', DynamicParkingBasedOnSurrounding())
        app.add_route('/airquality/so2', So2Predictions())
        app.add_route('/airquality/humidity', HumidityPredictions())
        app.add_route('/airquality/ambienttemp', AmbientTempPredictions())
        app.add_route('/gas/weather', Weather())
        app.add_route('/gas/airdispersion', airDispersion())
        app.add_route('/light/intensity', lightIntensity())
        app.add_route('/waste/binroute', wasteBinCollectionRoute())
        app.add_route('/parking/all', CompParking())
        app.add_route('/crisis/nearbyplace', NearPlaces())
        app.add_route('/crisis/nearbytraffic', NearByTraffic())
        app.add_route('/crisis/nearbymap', NearByMap())
        logger.info('Routes added to APP successfully')
    except ValueError as e:
        # BUG FIX: the original message had no '{}' placeholder, so
        # .format(e) silently discarded the exception detail.
        logger.error('Failed adding routes to APP. {}'.format(e))
def getDBConnection(self, psqlhost, databaseName, userName, password):
    """Open a psycopg2 connection and return a cursor for it.

    Returns None if the connection cannot be established (the error
    is logged).
    """
    try:
        logger.info("Establishing connnection to Postgres")
        connection = psycopg2.connect(host=psqlhost,
                                      dbname=databaseName,
                                      user=userName,
                                      password=password)
        return connection.cursor()
    except Exception as e:
        logger.error("Error in creating connection to DataBase. {}".format(e))
def main():
    """Run the light-intensity forecast for Banglore and print it as JSON."""
    model = LightIntensityBasedOnSun()
    logger.info("calling forecast method")
    forecast_df = model.forecast("Banglore", 10)
    output = {
        "data": json.loads(forecast_df.to_json(orient='records',
                                               date_unit='ms'))
    }
    print("------------------------------------------")
    print(json.dumps(output))
def on_get(self, req, resp):
    """GET /parking/all — near-by parking spaces with ratings for `spaceid`.

    Query params: spaceid (required). Responds 200 with the (last) matching
    row, 204 when no rows match, 400/408/500 on errors.
    """
    try:
        input_params = req.params
        spaceId = input_params['spaceid']
        dp = DynamicParking()
        # Connection to Postgres DB (NOTE(review): hard-coded credentials)
        marker = dp.getDBConnection(
            "52.55.107.13", "cdp", "sysadmin", "sysadmin")
        # BUG FIX: spaceId came straight from the URL and was concatenated
        # into the SQL text (injection risk) — parameterize instead.
        query = ("select n.parkingsapce,n.nearparkingspace, r.rating "
                 "from next_parking_space as n, parking_space_rating as r "
                 "where n.parkingsapce = r.parkingsapce "
                 "and n.parkingsapce = %s")
        marker.execute(query, (spaceId,))
        # BUG FIX: this fetch and the response assembly originally ran
        # *after* the except blocks, so a handled exception still crashed
        # with NameError on `marker`/`spaceId`.
        out = marker.fetchall()
        if len(out) == 0:
            # No rows for this space id -> 204, and stop: the original fell
            # through and overwrote the 204 with a 200.
            logger.info('No data available for location {}.'.format(spaceId))
            resp.status = falcon.HTTP_204
            resp.body = json.dumps({
                "Status Code": 204,
                "Description": "No data available",
                "title": "Comp Parking Space"
            })
            return
        output = {}
        # NOTE(review): keys are overwritten each iteration, so only the
        # last row is returned — confirm this is intended.
        for i in out:
            output["parkingSpace"] = i[0]
            output["nearBySpace"] = i[1]
            output["rating"] = i[2]
        resp.status = falcon.HTTP_200
        resp.body = json.dumps(output)
    except KeyError as e:
        logger.error('Invalid Request.{}'.format(e))
        resp.status = falcon.HTTP_400
        resp.body = json.dumps({
            "Status Code": 400,
            "Description": "Invalid Request",
            "title": "Comp Parking Space"
        })
    except psycopg2.OperationalError as e:
        logger.error('Failed to execute DB Query.{}'.format(e))
        resp.status = falcon.HTTP_408
        resp.body = json.dumps({
            "Status Code": 408,
            "Description": "Connection Timed Out",
            "title": "Comp Parking Space"
        })
    except Exception as e:
        logger.error('Comp Parking Space ended with Error .{}'.format(e))
        resp.status = falcon.HTTP_500
        resp.body = json.dumps({
            "Status Code": 500,
            "Description": "Internal Server Error",
            "title": "Comp Parking Space"
        })
def get_road_info(self, point, radius=50):
    """Fetch HERE traffic flow around `point` ([lat, lng]) within `radius` m.

    Returns a DataFrame of unique road names ('DE') with their shapes
    ('SHP'), or None on failure (logged).
    """
    try:
        logger.info("Getting road info")
        link = ('https://traffic.cit.api.here.com/traffic/6.1/flow.json?prox='
                + str(point[0]) + '%2C' + str(point[1]) + '%2C' + str(radius)
                + '&responseattributes=sh,fc&app_id=ONe0616Q1jK0RLmeN7fc'
                + '&app_code=bWQ6Eir1V2KRjaS-5oMOcw')
        response = requests.get(link)
        roads = self.create_data_1(response.json())[['DE', 'SHP']]
        return roads.drop_duplicates(subset='DE')
    except Exception as e:
        logger.error("Failed to get road info. {}".format(e))
def create_data_1(self, dictionary, path_to_save=None):
    """Flatten a HERE traffic-flow JSON payload into a DataFrame.

    Walks RWS -> RW -> FIS -> FI -> CF and emits one row per CF entry.
    If `path_to_save` is given the frame is appended (or written) to that
    CSV and nothing is returned; otherwise the DataFrame is returned.
    Returns None on failure (logged).
    """
    try:
        logger.info("Getting RWS values from dictionary")
        RWS = dictionary['RWS']
        rows = []
        logger.info("Extracting values from RWS to create DataFrames")
        for rw in RWS:
            EBU_COUNTRY_CODE = rw['EBU_COUNTRY_CODE']
            EXTENDED_COUNTRY_CODE = rw['EXTENDED_COUNTRY_CODE']
            UNITS = rw['UNITS']
            for elem in rw['RW']:
                DE = elem['DE']
                LI = elem['LI']
                PBT = elem['PBT']
                mid = elem['mid']
                for FI in elem['FIS']:
                    for elem_2 in FI['FI']:
                        tmc = elem_2['TMC']
                        LE = tmc['LE']
                        PC = tmc['PC']
                        QD = tmc['QD']
                        # first coordinate token of the first shape value
                        SHP = elem_2['SHP'][0]['value'][0].split(" ")[0]
                        for elem_3 in elem_2['CF']:
                            rows.append([
                                EBU_COUNTRY_CODE, EXTENDED_COUNTRY_CODE,
                                UNITS, DE, LI, PBT, mid, LE, PC, QD, SHP,
                                elem_3['FF'], elem_3['CN'], elem_3['JF'],
                                elem_3['SP'], elem_3['SU'], elem_3['TY']
                            ])
        logger.info("Adding columns to DataFrames")
        columns = [
            'EBU_COUNTRY_CODE', 'EXTENDED_COUNTRY_CODE', 'UNITS', 'DE',
            'LI', 'PBT', 'mid', 'LE', 'PC', 'QD', 'SHP', 'FF', 'CN', 'JF',
            'SP', 'SU', 'TY'
        ]
        df = pd.DataFrame(rows, columns=columns)
        logger.info("Created DataFrame")
        # PBT is overwritten with the fetch timestamp for every row.
        df['PBT'] = datetime.datetime.now()
        if path_to_save:
            if os.path.isfile(path_to_save):
                df.to_csv(path_to_save, mode='a', header=False, index=False)
            else:
                df.to_csv(path_to_save, mode='w', index=False)
        else:
            return df
    except Exception as e:
        logger.error("Error in creating DataFrames. {}".format(e))
def on_get(self, req, resp):
    """GET /light/intensity — forecast light on/off times for a location.

    Query params: location, offset (minutes). Responds 200 with the
    forecast, 204 when empty, 400 on missing params, 500 otherwise.
    """
    try:
        params = req.params  # parameters from the request URL
        location = params['location']
        offset = params['offset']
        logger.info("Executing LightIntensityBasedOnSun model")
        model = LightIntensityBasedOnSun()
        result = model.forecast(location, int(offset))
        payload = {
            'data': json.loads(result.to_json(orient='records',
                                              date_format='iso',
                                              date_unit='s'))
        }
        resp.status = falcon.HTTP_200
        resp.body = json.dumps(payload)
        # NOTE(review): resp.body always contains at least '{"data": ...}'
        # at this point, so this 204 branch looks unreachable — confirm.
        if len(resp.body) == 0:
            logger.info(
                "No data available for location {}.".format(location))
            resp.status = falcon.HTTP_204
            resp.body = json.dumps({
                "Status Code": 204,
                "Description": "Data not available",
                "title": "Light Intensity"
            })
    except KeyError as e:
        logger.error(
            'Request doesnt have all the required parameters.{}'.format(e))
        resp.status = falcon.HTTP_400
        resp.body = json.dumps({
            "Status Code": 400,
            "Description": "Malformed Request",
            "title": "Light Intensity"
        })
    except Exception as e:
        logger.error('Exception at Light intensity end point.{}'.format(e))
        resp.status = falcon.HTTP_500
        resp.body = json.dumps({
            "Status Code": 500,
            "Description": "Internal Server Error",
            "title": "Light Intensity"
        })
def get_reviews(self, sen_pts, categ, radius, label='mean_position'):
    """Query Google Places around each sensor point, per category.

    Returns {sensor_index: {category: DataFrame[name, is_open, types, loc,
    rating, dist(km)]}} or None on failure (logged).
    """
    try:
        logger.info(
            "Getting parking reviews for geo points with in the radius")
        api_keys = ['AIzaSyCS-RjgQk-XsuyTiaRTh040D9iibCsW9zQ',
                    'AIzaSyDoZZaxE3da9nD2U2GzC3xs1FWDywWkOiI',
                    'AIzaSyC6zF5CWGqw9Mha4aUrEzFsSYw5n3I3raM']
        poi_pos = {}
        for i in range(sen_pts.shape[0]):
            per_category = {}
            for cat in categ:
                pt = sen_pts.iloc[i][label]
                key_i = 0
                # Rotate through API keys until a request is not rejected
                # with OVER_QUERY_LIMIT.
                # NOTE(review): a network failure is only logged and does
                # not break the loop — this can retry indefinitely.
                while True:
                    link = ('https://maps.googleapis.com/maps/api/place/search/json?location='
                            + str(pt[0]) + ',' + str(pt[1])
                            + '&radius=' + str(radius)
                            + '&type=' + cat
                            + '&key=' + api_keys[key_i])
                    try:
                        r = requests.get(link)
                        if r.json()['status'] == 'OVER_QUERY_LIMIT':
                            key_i = 0 if key_i == 2 else key_i + 1
                        else:
                            break
                    except Exception as e:
                        logger.error(
                            "Failed to contact google maps. {}".format(e))
                rows = []
                for place in r.json()['results']:
                    try:
                        is_open = place['opening_hours']['open_now']
                    except:
                        is_open = True  # assume open when hours are absent
                    types = place['types']
                    loc = [place['geometry']['location']['lat'],
                           place['geometry']['location']['lng']]
                    name = place['name']
                    try:
                        rating = place['rating']
                    except:
                        rating = np.nan
                    dist = self.haversine(pt, loc)
                    rows.append([name, is_open, types, loc, rating, dist])
                per_category[cat] = pd.DataFrame(
                    rows,
                    columns=['name', 'is_open', 'types', 'loc', 'rating',
                             'dist(km)'])
            poi_pos[sen_pts.index[i]] = per_category
        return poi_pos
    except Exception as e:
        logger.error("Failed to get review for parking. {}".format(e))
def create_app():
    """Instantiate the falcon API object, attach middleware and routes.

    Raises a 503 HTTPError if startup fails; otherwise returns the app.
    """
    try:
        middleware = [
            Crossdomain(),
            # AuthMiddleware(),
            JSONTranslator()
        ]
        # Create Falcon API object through which we can add routes
        app = falcon.API(middleware=middleware)
        logger.info('APP started successfully')
        setup_routes(app)  # Adding routes to falcon API
    except Exception as e:
        logger.error('Launching APP failed {}'.format(e))
        raise falcon.HTTPError(
            status="503 APP Unavailable",
            title='Atlantis APP',
            description='Atlantis falcon APP failed to start',
            code=503)
    return app
def on_get(self, req, resp):
    """GET /gas/weather — current weather details for lat/lng.

    Query params: lat, lng. Responds 200 with the weather fields,
    400 on missing parameters.
    """
    try:
        input_params = req.params  # gather lat and lng from the request
        lat = input_params['lat']
        lng = input_params['lng']
        winfo = WeatherInfo()
        (temperature, windBearing, windDirection, windSpeed, weather,
         pressure, cloudCoverage, hour, day) = winfo.weatherInfo(lat, lng)
    except KeyError as e:
        logger.error(
            'Weather Info request ended with exception.{}'.format(e))
        resp.status = falcon.HTTP_400
        resp.body = json.dumps({
            "Status Code": 400,
            "Description": "Malformed Request",
            "title": "Weather Info"
        })
        # BUG FIX: the original fell through after this handler and then
        # crashed with NameError on the unset weather variables.
        return
    output = {
        "temperature": temperature,
        "windBearing": windBearing,
        "windDirection": windDirection,
        "windSpeed": windSpeed,
        "weather": weather,
        "pressure": pressure,
        "cloudCoverage": cloudCoverage,
        "hour": hour,
        "day": day,
    }
    # NOTE(review): output always has 9 keys here, so this branch looks
    # unreachable — confirm intent.
    if len(output) == 0:
        logger.info(
            "No data available for requested Lat {} and Long {}".format(
                lat, lng))
        resp.status = falcon.HTTP_204
        resp.body = json.dumps({
            "Status Code": 204,
            "Description": "Data Not Available",
            "title": "WeatherInfo"
        })
        return
    resp.status = falcon.HTTP_200
    resp.body = json.dumps(output)  # output in json format -> browser
def get_rating(self, sen_pts, categ, radius):
    """Score each parking location per category from Google reviews.

    The per-category score is mean(is_open * rating / dist) over the
    places returned by get_reviews. Returns (ratings_dict, raw_poi_dict)
    or None on failure (logged).
    """
    try:
        logger.info("Finding parking rate for categories")
        poi_pos = self.get_reviews(sen_pts, categ, radius)
        net_res = {}
        for key, frames in poi_pos.items():
            res_temp = {}
            for cat in categ:
                df = frames[cat].fillna(0)
                try:
                    # columns: 1 = is_open, 4 = rating, 5 = dist(km)
                    res_temp[cat] = (df.iloc[:, 1] * df.iloc[:, 4]
                                     / df.iloc[:, 5]).mean()
                except:
                    res_temp[cat] = np.nan
            net_res[key] = res_temp
        return net_res, poi_pos
    except Exception as e:
        logger.error("Failed to find parking rate. {}".format(e))
def poi(self,spaceId,radius): try: logger.info("Finding point of Interest on Parking for spaceid") #dp = DynamicParking() '''marker = dp.getDBConnection("52.55.107.13", "cdp", "sysadmin", "sysadmin") marker.execute("""select sid, state, providerdetails, boundary, label, levellabel, ts from parking_space""") data_db = marker.fetchall() geo_pts = pd.DataFrame(data_db, columns=['sid', 'state', 'providerdetails', 'geo_pts', 'label', 'levellabel', 'ts']) geo_pts['geo_pts'] = geo_pts['geo_pts'].apply(lambda x: ast.literal_eval(x)) geo_pts['geo_pts'] = geo_pts['geo_pts'].apply(lambda x: dp.conv_2_list(x)) print("line after getting data from db")''' logger.info("Fetching Lat and Longs for spaceId") geo_pts = self.get_geo_parking(spaceId) geo_pts['mean_position'] = geo_pts['geo_pts'].apply(lambda x: [np.mean([i[0] for i in x]), np.mean([i[1] for i in x])]) sen_pts = geo_pts.groupby('sid').agg({'mean_position': 'first'}) categ=['mosque','hindu_temple','home_good_store','university','electronics_store','courthouse','restaurant','bank','store','embassy','shopping_mall'] print("Now going to fetch ratings") ratings,poi_info=self.get_rating(sen_pts,categ,radius) return ratings,poi_info,categ except Exception as e: logger.error("Unable to gather POI for parking area. {}".format(e)) '''
def get_route_info(self, point_1, point_2, mode=['fastest', 'car'],
                   app_id='QacvSHflGqkVBJGvs9OS',
                   app_code='9dbgDyDrC1ChasubHX7Xfw',
                   traffic_mode='enabled'):
    """Query HERE routing between two points.

    point_1/point_2 are sequences whose first element is a [lat, lng]
    pair. Returns (trafficTime, travelTime, distance) from the route
    summary, or None on failure (logged).
    """
    try:
        logger.info("Getting Route info")
        # mode is a list such as ['fastest', 'car'], joined with '%3B'
        link = ('https://route.cit.api.here.com/routing/7.2/calculateroute.json?waypoint0='
                + str(point_1[0][0]) + '%2C' + str(point_1[0][1])
                + '&waypoint1=' + str(point_2[0][0]) + '%2C'
                + str(point_2[0][1])
                + '&mode=' + '%3B'.join(str(e) for e in mode)
                + '%3Btraffic%3A' + traffic_mode
                + '&app_id=' + app_id + '&app_code=' + app_code
                + '&departure=now')
        summary = requests.get(link).json()['response']['route'][0]['summary']
        return (summary['trafficTime'], summary['travelTime'],
                summary['distance'])
    except Exception as e:
        logger.error("Error in finding route info. {}".format(e))
def on_get(self, req, resp):
    """GET /parking/surrounding — POIs and ratings around a parking space.

    Query params: spaceid (required), radius (optional, default 50).
    Responds 200 with the POI list, 204 when no ratings exist, 400/500
    on errors.
    """
    try:
        input_params = req.params
        spaceId = input_params['spaceid']
        if 'radius' in input_params:
            radius = int(input_params['radius'])
        else:
            logger.info(
                "Radius value not passed through request url so setting it to 50 by default"
            )
            radius = 50
        dp = DynamicParking()
        ratings, poi_info, categ = dp.poi(spaceId, radius)
        poi = []
        output = {}
        for cat in categ:
            for index, row in poi_info[spaceId][cat].iterrows():
                if str(row['rating']) == "nan":
                    row['rating'] = 0  # unrated places count as 0
                poid = {
                    "name": row['name'],
                    "location": row['loc'],
                    "rating": row['rating'],
                    "distance": round(row['dist(km)'], 6),
                    "isOpen": row['is_open']
                }
                poi.append(poid)
        pr = []
        for i in ratings[spaceId]:
            if str(ratings[spaceId][i]) == "nan":
                ratings[spaceId][i] = 'NULL'
            prd = {"typeOfPlace": i, "rating": ratings[spaceId][i]}
            pr.append(prd)
        if len(pr) == 0:
            logger.info(
                'No data available for location {}.'.format(spaceId))
            resp.status = falcon.HTTP_204
            resp.body = json.dumps({
                "Status Code": 204,
                "Description": "No data available",
                "title": "Dynamic Parking Space"
            })
            # BUG FIX: the original fell through here, overwriting the 204
            # response with an empty 200 below.
            return
        output["poi"] = poi
        #output["prakingRate"]=pr
        resp.status = falcon.HTTP_200
        resp.body = json.dumps(output)
    except KeyError as e:
        logger.error('Invalid Request.{}'.format(e))
        resp.status = falcon.HTTP_400
        resp.body = json.dumps({
            "Status Code": 400,
            "Description": "Invalid Request",
            "title": "Dynamic Parking Space"
        })
    except Exception as e:
        logger.info(
            'Unable to fetch Dynamic Parking for spaceId.{}'.format(e))
        resp.status = falcon.HTTP_500
        resp.body = json.dumps({
            "Status Code": 500,
            "Description": "Internal Server Error",
            "title": "Dynamic Parking Space"
        })
def on_get(self, req, resp):
    """GET /parking/spacepredictions — occupancy forecasts per space.

    Query params: location, starttime, endtime (epoch seconds),
    spaceid (optional, default "all"). Responds 200 with the forecast
    JSON, 204 when no models exist, 400/404/500 on errors.
    """
    try:
        input_params = req.params  # parameters from the requested URL
        location = input_params['location']
        if 'spaceid' in input_params:
            parkingspaceid = input_params['spaceid']
        else:
            logger.info(
                "parking spaceid not sent through URL so setting it to ALL")
            parkingspaceid = "all"
        starttime = input_params['starttime']
        endtime = input_params['endtime']
        resp.status = falcon.HTTP_200
        logger.info("calling Forecast method")
        pf = Forecast()
        fileDir = os.path.dirname(__file__)
        #fileName = "parkingforcastModelAll.pickle"
        fileName = "parkingforcast.pickle"
        filePath = os.path.join(fileDir, fileName)
        # BUG FIX: original referenced undefined `filename` and passed it
        # as a bare second logger argument (dropped / format error).
        logger.info("Loading pickle file {}".format(fileName))
        models = pickle.load(open(filePath, 'rb'))
        if len(models.keys()) == 0:
            logger.info(
                'No data available for location {}.'.format(location))
            resp.status = falcon.HTTP_204
            resp.body = json.dumps({
                "Status Code": 204,
                "Description": "Data not available",
                "title": "Parking Predictions"
            })
            return
        # BUG FIX: the forecast loop originally ran after the except
        # blocks, so a handled exception still crashed with NameError on
        # `models`/`pf`. It now runs only on the success path.
        forecast_values = {}
        start = datetime.datetime.fromtimestamp(float(starttime))
        end = datetime.datetime.fromtimestamp(float(endtime))
        for parking_space in models.keys():
            if parkingspaceid == "all":
                logger.info("set parking spaceid to ALL")
                forecast_values[parking_space] = pf.get_parkingforecast(
                    models[parking_space], start, end).to_json(
                        orient='records', date_format='iso', date_unit='s')
                resp.body = forecast_values[parking_space]
            elif parking_space == parkingspaceid:
                logger.info(
                    "Parking space id set to {}".format(parking_space))
                forecast_values[parking_space] = pf.get_parkingforecast(
                    models[parking_space], start, end).to_json(
                        orient='records', date_format='iso', date_unit='s')
                resp.body = forecast_values[parking_space]
            else:
                logger.info("No forcasting for :: {}".format(parking_space))
    except KeyError as e:
        logger.error(
            'Parking predictions ended with exception.{}'.format(e))
        resp.status = falcon.HTTP_400
        resp.body = json.dumps({
            "Status Code": 400,
            "Description": "Invalid Request",
            "title": "Parking Predictions"
        })
    except FileNotFoundError as e:
        logger.error('Error loading pickle file.{}'.format(e))
        resp.status = falcon.HTTP_404
        resp.body = json.dumps({
            "Status Code": 404,
            "Description": "File not found",
            "title": "Parking Predictions"
        })
    except Exception as e:
        logger.error(
            'Unknow exception at parking predictions.{}'.format(e))
        resp.status = falcon.HTTP_500
        resp.body = json.dumps({
            "Status Code": 500,
            "Description": "Internal Server Error",
            "title": "Parking Predictions"
        })
def forecast(self, location, offset):
    """Compute street-light Off/On times from sunrise/sunset for `location`.

    `offset` (minutes) moves Off earlier than sunrise and On later than
    sunset. Returns a DataFrame with ISO-formatted 'Off_time'/'On_time'
    columns for the next 7 days, or None on failure (logged).
    """
    try:
        logger.info(
            "Getting Latitude and Longitude from map API for location")
        # Getting Latitude and Longitude from HERE map API
        result_1 = requests.get(
            'https://geocoder.cit.api.here.com/6.2/geocode.json?searchtext='
            + location
            + '/&app_id=QacvSHflGqkVBJGvs9OS&app_code=9dbgDyDrC1ChasubHX7Xfw&gen=8'
        )
        try:
            out_1 = result_1.json()
            if len(out_1) == 0:
                logger.info("No lat and long available for location")
        except Exception as e:
            logger.error("Failed with error. {} ".format(e))
        position = out_1['Response']['View'][0]['Result'][0]['Location'][
            'NavigationPosition'][0]
        Latitude = position['Latitude']
        Longitude = position['Longitude']
        # Getting Sunrise and Sunset from Dark Sky API
        # Access Key - f837b14f3e53cfed0e7cec9e3765e3c5
        result_2 = requests.get(
            'https://api.darksky.net/forecast/f837b14f3e53cfed0e7cec9e3765e3c5/'
            + str(Latitude) + ',' + str(Longitude)
            + '/?exclude=currently,minutely?extend=hourly')
        try:
            logger.info("getting Sunrise and Sunset for location")
            out_2 = result_2.json()
        except Exception as e:
            logger.error("Failed to get Sunrise and Sunset. {} ".format(e))
        sunrise_time = []
        sunset_time = []
        for x in range(7):
            sunrise_time.append(out_2['daily']['data'][x]['sunriseTime'])
            sunset_time.append(out_2['daily']['data'][x]['sunsetTime'])
        # Off = sunrise - offset minutes (original strftime(str(date)) is
        # an identity because the format has no % directives).
        sunrise_realtime = [
            str(datetime.fromtimestamp(t) - timedelta(minutes=offset))
            for t in sunrise_time
        ]
        # On = sunset + offset minutes
        sunset_realtime = [
            str(datetime.fromtimestamp(t) + timedelta(minutes=offset))
            for t in sunset_time
        ]
        Data = pd.DataFrame()
        Data['Off_time'] = sunrise_realtime
        Data['On_time'] = sunset_realtime
        # Normalize both columns to ISO-style strings.
        Data['Off_time'] = pd.DataFrame([
            parse(x).strftime("%Y-%m-%dT%H:%M:%S.%f%Z")
            for x in Data['Off_time']
        ])
        Data['On_time'] = pd.DataFrame([
            parse(x).strftime("%Y-%m-%dT%H:%M:%S.%f%Z")
            for x in Data['On_time']
        ])
        return (Data)
    except Exception as e:
        logger.error("Error in function forecast. {}".format(e))
def conv_2_list(self, row):
    """Flatten a boundary dict ({'geoPoint': [{'latitude', 'longitude'},
    ...]}) into a list of [lat, lng] pairs."""
    logger.info("converting to list")
    return [[pt['latitude'], pt['longitude']] for pt in row['geoPoint']]
def on_get(self, req, resp):
    """GET /airquality/so2 — SO2 forecasts per sensor between start/end.

    Query params: location, starttime, endtime (epoch seconds),
    id (optional sensor id, default "all"). Responds 200 with the
    forecast JSON, 204 when no models exist, 400/404/500 on errors.
    """
    try:
        input_params = req.params
        location = input_params['location']
        if 'id' in input_params:
            sensorid = input_params['id']
        else:
            sensorid = "all"  # default: forecast every sensor
        starttime = input_params['starttime']
        endtime = input_params['endtime']
        resp.status = falcon.HTTP_200
        pf = Forecast()
        # BUG FIX: fileDir was never defined in this handler (NameError at
        # runtime) — derive it from this module like the sibling handlers.
        fileDir = os.path.dirname(__file__)
        fileName = "AirSo2All.pickle"
        filePath = os.path.join(fileDir, fileName)
        models = pickle.load(open(filePath, 'rb'))
        if len(models.keys()) == 0:
            logger.info(
                'No data available for location {}.'.format(location))
            resp.status = falcon.HTTP_204
            resp.body = json.dumps({
                "Status Code": 204,
                "Description": "Data not available",
                "title": "SO2 Predictions"
            })
            return
        # BUG FIX: the forecast loop originally ran after the except blocks
        # and crashed with NameError on `models`/`pf` when one had fired.
        forecast = {}
        start = datetime.datetime.fromtimestamp(float(starttime))
        end = datetime.datetime.fromtimestamp(float(endtime))
        for sensid in models.keys():
            if sensorid == "all":
                forecast[sensid] = pf.get_forecast(
                    models[sensid], start, end).to_json(
                        orient='records', date_format='iso', date_unit='s')
                # NOTE(review): resp.body is never set on the "all" path,
                # matching the original — confirm whether that is intended.
            elif sensid == sensorid:
                forecast[sensid] = pf.get_forecast(
                    models[sensid], start, end).to_json(
                        orient='records', date_format='iso', date_unit='s')
                resp.body = forecast[sensid]
            else:
                logger.info("No forcasting for :: {}".format(sensid))
    except KeyError as e:
        # BUG FIX: original 'Invalid So2 Request'.format(e) had no '{}'
        # and silently dropped the exception detail.
        logger.error('Invalid So2 Request.{}'.format(e))
        resp.status = falcon.HTTP_400
        resp.body = json.dumps({
            "Status Code": 400,
            "Description": "Invalid Request",
            "title": "SO2 Predictions"
        })
    except FileNotFoundError as e:
        logger.error('Error loading pickle file.{}'.format(e))
        resp.status = falcon.HTTP_404
        resp.body = json.dumps({
            "Status Code": 404,
            "Description": "File not found",
            "title": "SO2 Predictions"
        })
    except Exception as e:
        logger.error('Unknown error at So2 Predictions.{}'.format(e))
        resp.status = falcon.HTTP_500
        resp.body = json.dumps({
            "Status Code": 500,
            "Description": "Internal Server Error",
            "title": "SO2 Predictions"
        })
def get_geo_parking(self, spaceId):
    """Fetch parking-space details for `spaceId` from the CDP realtime API.

    Returns a DataFrame with one row per parking space, with the boundary
    flattened to [[lat, lng], ...] in 'geo_pts'; None on failure (logged).
    """
    try:
        logger.info("Executing get_geo_parking function")
        import requests
        data_1 = json.dumps(
            {"Query": {"Find": {"ParkingSpace": {"sid": {"eq": spaceId}}}}})
        # Real time api for parking
        link = ('https://cdp-jaipur.cisco.com/deveng/fid-CIMQueryInterface'
                '?SensorCustomerKey=500001&AppKey=CDP-App&UserKey=500060')
        try:
            parking_real_time = requests.post(link, data=data_1)
            data = parking_real_time.json()
        except Exception as e:
            logger.error("API request failed. {}".format(e))
        li = []
        colms = ['sid', 'levelLabel', 'operatedBy', 'label', 'occupied',
                 'total', 'sensorCustomerId', 'hierId', 'siblingIndex',
                 'provider', 'providerId', 'geo_pts', 'maxDurationMinutes',
                 'parkingRate_durationMinutes', 'parkingRate_farePerMinute',
                 'zoneType']
        li.append(colms)
        if data['Find']['Status'] == "NoResult":
            logger.info("Data or spaceId is not available")
            # BUG FIX: original formatted this message with `e`, which is
            # undefined on this path — the NameError masked the real cause.
            raise Exception("Data or spaceId is not available.")
        else:
            for item in data['Find']['Result']:
                elem = item['ParkingSpace']
                sid = elem['sid']
                # Optional fields default to NaN when absent.
                levelLabel = elem.get('levelLabel', np.nan)
                operatedBy = elem.get('operatedBy', np.nan)
                label = elem['label']
                occupied = elem['state']['occupied']
                total = elem['state']['total']
                sensorCustomerId = elem['sensorCustomerId']
                hierId = elem['hierId']
                siblingIndex = elem.get('siblingIndex', np.nan)
                provider = elem['providerDetails']['provider']
                providerId = elem['providerDetails']['providerId']
                geo_pt = elem['boundary']
                try:
                    opParams = elem['opParams']
                    maxDurationMinutes = opParams['maxDurationMinutes']
                    parkingRate_durationMinutes = \
                        opParams['parkingRate']['durationMinutes']
                    parkingRate_farePerMinute = \
                        opParams['parkingRate']['farePerMinute']
                    zoneType = opParams['zoneType']
                except:
                    # all-or-nothing: any missing opParams field blanks all
                    maxDurationMinutes = np.nan
                    parkingRate_durationMinutes = np.nan
                    parkingRate_farePerMinute = np.nan
                    zoneType = np.nan
                geo_pts = [[p['latitude'], p['longitude']]
                           for p in geo_pt['geoPoint']]
                li.append([sid, levelLabel, operatedBy, label, occupied,
                           total, sensorCustomerId, hierId, siblingIndex,
                           provider, providerId, geo_pts,
                           maxDurationMinutes, parkingRate_durationMinutes,
                           parkingRate_farePerMinute, zoneType])
        data_df = pd.DataFrame(li[1:], columns=li[0])
        return data_df
    except Exception as e:
        logger.error(
            "geo_parking function failed with errors. {}".format(e))
def on_get(self, req, resp):
    """GET / — hello-world root endpoint (health-check style)."""
    logger.info("Executing the RootResource class")
    payload = {'message': 'Hello, World!'}
    resp.body = json.dumps(payload)
    resp.status = falcon.HTTP_200
def on_get(self, req, resp):
    """GET /airquality/humidity — humidity forecasts per sensor.

    Query params: location, starttime, endtime (epoch seconds),
    id (optional sensor id, default "all"). Responds 200 with the
    forecast JSON, 204 when no models exist, 400/404/500 on errors.
    """
    try:
        input_params = req.params
        location = input_params['location']
        if 'id' in input_params:
            sensorid = input_params['id']
        else:
            sensorid = "all"
        starttime = input_params['starttime']
        endtime = input_params['endtime']
        resp.status = falcon.HTTP_200
        pf = Forecast()
        fileDir = os.path.dirname(__file__)
        # humidity data lives in this pickle file
        fileName = "humidityAll.pickle"
        filePath = os.path.join(fileDir, fileName)
        models = pickle.load(open(filePath, 'rb'))  # load the pickle file
        if len(models.keys()) == 0:
            logger.info(
                'No data available for location {}.'.format(location))
            resp.status = falcon.HTTP_204
            resp.body = json.dumps({
                "Status Code": 204,
                "Description": "Data not available",
                "title": "Humidity Predictions"
            })
            return
        # BUG FIX: the forecast loop originally ran after the except blocks
        # and crashed with NameError on `models`/`pf` when one had fired.
        forecast = {}
        start = datetime.datetime.fromtimestamp(float(starttime))
        end = datetime.datetime.fromtimestamp(float(endtime))
        for sensid in models.keys():
            if sensorid == "all":
                forecast[sensid] = pf.get_forecast(
                    models[sensid], start, end).to_json(
                        orient='records', date_format='iso', date_unit='s')
                # NOTE(review): resp.body is never set on the "all" path,
                # matching the original — confirm whether that is intended.
            elif sensid == sensorid:
                forecast[sensid] = pf.get_forecast(
                    models[sensid], start, end).to_json(
                        orient='records', date_format='iso', date_unit='s')
                resp.body = forecast[sensid]
            else:
                logger.info("No forcasting for :: {}".format(sensid))
    except KeyError as e:
        logger.error('Invalid HumidityPredictions Request.{}'.format(e))
        resp.status = falcon.HTTP_400
        resp.body = json.dumps({
            "Status Code": 400,
            "Description": "Invalid Request",
            "title": "Humidity Predictions"
        })
    except FileNotFoundError as e:
        logger.error('Error loading pickle file.{}'.format(e))
        resp.status = falcon.HTTP_404
        resp.body = json.dumps({
            "Status Code": 404,
            "Description": "File not found",
            # BUG FIX: copy-paste said "Parking Predictions" in the
            # humidity handler.
            "title": "Humidity Predictions"
        })
    except Exception as e:
        # BUG FIX: copy-paste log message said "So2 predictions".
        logger.error('Error at Humidity predictions.{}'.format(e))
        resp.status = falcon.HTTP_500
        resp.body = json.dumps({
            "Status Code": 500,
            "Description": "Internal Server Error",
            "title": "Humidity Predictions"
        })
def on_get(self, req, resp):
    """Run the heavy-gas air-dispersion model and return its datasets.

    Query params:
        gasname, clevel, lat, lng -- required model inputs
        windbearing, windspeed    -- optional; forwarded to the model only
                                     when present in the request

    Responds 200 with {"modelName", "dataset75", "dataSet25"}, 204 when
    the model produced nothing, 400 on a missing required parameter.
    """
    try:
        input_params = req.params  # query-string parameters from the request
        gasName = input_params['gasname']
        concentrationLevel = input_params['clevel']
        lat = input_params['lat']
        lng = input_params['lng']
        # Model object implementing the dispersion algorithm (defined elsewhere).
        airDisp = airDispersionModel()
    except KeyError as e:
        logger.error(
            'Air Dispersion request ended with exception.{}'.format(e))
        resp.status = falcon.HTTP_400
        resp.body = json.dumps({
            "Status Code": 400,
            "Description": "Malformed Request",
            "title": "Air Dispersion"
        })
        # Bug fix: the original fell through after setting 400 and then
        # raised NameError on the variables that were never assigned.
        return

    # Collapse the original four-branch if/elif ladder: build keyword
    # arguments only for the optional wind inputs that were supplied.
    # (pWindBearing/pWindSpeed match the model's 5th/6th positional params.)
    wind_kwargs = {}
    if 'windbearing' in input_params:
        wind_kwargs['pWindBearing'] = float(input_params['windbearing'])
    if 'windspeed' in input_params:
        wind_kwargs['pWindSpeed'] = float(input_params['windspeed'])

    out = airDisp.predictGasDispersionHeaveyGasEnvSensor(
        str(gasName), float(concentrationLevel), float(lat), float(lng),
        **wind_kwargs)

    # out is (model_name, df75[, df25]); DataFrames are serialized record-wise.
    output = {
        "modelName": out[0],
        "dataset75": json.loads(out[1].to_json(orient='records')),
    }
    if len(out) == 3:
        output["dataSet25"] = json.loads(out[2].to_json(orient='records'))
    elif len(out) == 2:
        output["dataSet25"] = "nodata"

    if len(output) == 0:
        logger.info(
            "No data available for requested Lat {} and Long {}".format(
                lat, lng))
        resp.status = falcon.HTTP_204
        resp.body = json.dumps({
            "Status Code": 204,
            "Description": "Data Not Available",
            "title": "Air Dispersion"
        })
        # Bug fix: the original immediately overwrote this 204 with a 200.
        return

    resp.status = falcon.HTTP_200
    resp.body = json.dumps(output)
def on_get(self, req, resp):
    """Find the nearest parking space and nearby road names for a space id.

    Query params:
        spaceid -- required parking-space identifier
        radius  -- optional search radius, defaults to 50

    Responds 200 with the near-by space and road list, 204 when no roads
    were found, 400 on a missing parameter, 408 on a DB connection
    failure, 500 otherwise.
    """
    try:
        input_params = req.params
        spaceId = input_params['spaceid']
        if 'radius' in input_params:
            radius = int(input_params['radius'])
        else:
            logger.info(
                "Radius value not passed through request url so setting it to 50 by default"
            )
            radius = 50

        nps = NextParkingSpace()
        # NOTE(review): DB credentials are hard-coded here (and in the
        # helper methods) — move them to configuration/secrets.
        marker = nps.getDBConnection("52.55.107.13", "cdp", "sysadmin",
                                     "sysadmin")
        # Bug fix: parameterized query — the original concatenated the
        # request-supplied spaceId straight into the SQL (injection risk).
        # Also fixed 'label.boundary' to 'label, boundary': two columns are
        # read from the row below (data[0][0] name, data[0][1] boundary).
        # 'parkingsapce' is kept as-is — presumably the actual column name
        # in next_parking_space; verify against the schema.
        query = ("select label, boundary from parking_space where sid="
                 "(select nearparkingspace from next_parking_space "
                 "where parkingsapce=%s)")
        marker.execute(query, (spaceId,))
        data = marker.fetchall()
        # boundary column holds a JSON document with a 'geoPoint' list.
        loc = json.loads(data[0][1])
        output = {
            "nearBySpace": {
                "spaceName": data[0][0],
                "location": [
                    loc['geoPoint'][0]['latitude'],
                    loc['geoPoint'][0]['longitude']
                ]
            }
        }

        # Road info near the space's mean position, as a DataFrame.
        rd = nps.next_parkingspace_main(spaceId, radius)
        le = [{"Name": row['DE'], "Location": row['SHP']}
              for _, row in rd.iterrows()]
        output["nearBySpaceName"] = le

        if len(le) == 0:
            logger.info('No data available for spaceID {}.'.format(spaceId))
            resp.status = falcon.HTTP_204
            resp.body = json.dumps({
                "Status Code": 204,
                "Description": "Data Not Available",
                "title": "Next Parking Space"
            })
        else:
            # Set the success response only when there is data, instead of
            # setting 200 and then overwriting it with 204 as before.
            resp.status = falcon.HTTP_200
            resp.body = json.dumps(output)
    except KeyError as e:
        logger.error('Invalid Request.{}'.format(e))
        resp.status = falcon.HTTP_400
        resp.body = json.dumps({
            "Status Code": 400,
            "Description": "Invalid Request",
            "title": "Next Parking Space"
        })
    except psycopg2.OperationalError as e:
        logger.error('Failed to execute DB Query.{}'.format(e))
        resp.status = falcon.HTTP_408
        resp.body = json.dumps({
            "Status Code": 408,
            "Description": "Connection Timed Out",
            "title": "Next Parking Space"
        })
    except Exception as e:
        logger.info('Next Parking Space ended up with error .{}'.format(e))
        resp.status = falcon.HTTP_500
        resp.body = json.dumps({
            "Status Code": 500,
            "Description": "Internal Server Error",
            "title": "Next Parking Space"
        })
def on_post(self, req, resp, name): logger.info("Executing the RootNameResource class") resp.body = json.dumps({ 'message': 'Hello, {}!'.format(name) }) resp.status = falcon.HTTP_200