def co_list(chat_id, command, globrand):
    if command == 'Where are you?':
        import geocoder
        import urllib
        url = 'http://myexternalip.com/raw'
        # Look up the external IP once and reuse the result instead of
        # querying the geocoder twice for the same address.
        latlng = geocoder.ip(urllib.urlopen(url).read()).latlng
        bot.sendLocation(chat_id, latlng[0], latlng[1])
    mystring = command
    if mystring.partition(" ")[0] == 'Say,':
        vari = mystring.partition(" ")[2]
        if globrand <= 5:
            bot.sendMessage(chat_id, vari.rpartition('or')[0])
        else:
            bot.sendMessage(chat_id, vari.rpartition('or')[2])
    if command == 'Where are you all?':
        bot.sendMessage(chat_id, 'I am here!')
    if command == 'Screenshot':
        app = QApplication(sys.argv)
        QPixmap.grabWindow(QApplication.desktop().winId()).save('screenshot.jpg', 'jpg')
        bot.sendPhoto(chat_id, open('screenshot.jpg', 'rb'))
    if command == 'RAM usage':
        bot.sendMessage(chat_id, 'Nearly {}% of RAM is used.'.format(virtual_memory().percent))
    if command == 'Who is your creator?':
        bot.sendMessage(chat_id, 'His nick is E_KOsh...')
        bot.sendMessage(chat_id, "You might want to write him... Don't be so shy - @E_KOsh")
    if command == 'CPU usage':
        bot.sendMessage(chat_id, "About {}% of my CPU power is used.".format(cpu_percent()))
    if command == 'What is the time?':
        bot.sendMessage(chat_id, str(datetime.datetime.now()))
    if command == 'Uptime':
        u = round(uppp() / 3600, 1)
        bot.sendMessage(chat_id, 'I am already working for {} hours.'.format(u))
def emailOpen(e):
    d = {}
    if request.cookies.get('LATrackingID'):
        a = modules.getModel(models.App, appid=request.cookies.get('LATrackingID'))
        d['app_id'] = a.id
    d['private_ip'] = request.environ.get('REMOTE_ADDR')
    d['public_ip'] = request.environ.get('HTTP_X_FORWARDED_FOR')
    d['full_url'] = request.environ.get('HTTP_REFERER', '').strip().lower()
    email = db.session.query(models.Email).filter_by(emailid=e).first()
    if email:
        d['email_id'] = email.id
    else:
        return jsonify(**{'status': 'failure', 'description': 'no such email found'})
    if d['public_ip']:
        g = geocoder.ip(d['public_ip'])
        d['lat'], d['lng'] = g.latlng
        d['city'] = g.city
        d['country'] = g.country
        d['state'] = g.state
    d['user_agent'] = request.environ.get('HTTP_USER_AGENT')
    if d['user_agent']:
        user_agent = parse(d['user_agent'])
        d['browser'] = user_agent.browser.family
        d['is_bot'], d['is_mobile'], d['is_tablet'], d['is_pc'] = user_agent.is_bot, user_agent.is_mobile, user_agent.is_tablet, user_agent.is_pc
    p = models.Visit(**d)
    p.date = datetime.now()
    db.session.add(p)
    db.session.commit()
    return jsonify(success=True, description='successfully tracked email')
def _redirect(l):
    d = {}
    d['private_ip'] = request.environ.get('REMOTE_ADDR')
    d['public_ip'] = request.environ.get('HTTP_X_FORWARDED_FOR')
    d['full_url'] = request.environ.get('HTTP_REFERER', '').strip().lower()
    if request.cookies.get('LATrackingID'):
        a = modules.getModel(models.App, appid=request.cookies.get('LATrackingID'))
        d['app_id'] = a.id
    link = db.session.query(models.Link).filter_by(linkid=l).first()
    if link:
        red_url = link.url
        d['link_id'] = link.id
        error = 'successfully tracked link'
    else:
        return jsonify(**{'status': 'failure', 'description': 'no such link found'})
    if d['public_ip']:
        g = geocoder.ip(d['public_ip'])
        d['lat'], d['lng'] = g.latlng
        d['city'] = g.city
        d['country'] = g.country
        d['state'] = g.state
    d['user_agent'] = request.environ.get('HTTP_USER_AGENT')
    if d['user_agent']:
        user_agent = parse(d['user_agent'])
        d['browser'] = user_agent.browser.family
        d['is_bot'], d['is_mobile'], d['is_tablet'], d['is_pc'] = user_agent.is_bot, user_agent.is_mobile, user_agent.is_tablet, user_agent.is_pc
    p = models.Visit(**d)
    p.date = datetime.now()
    db.session.add(p)
    db.session.commit()
    return redirect(red_url, code=302)
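# A minimal sketch, not part of the original project: emailOpen() and
# _redirect() above (and insert() further down) repeat the same
# geolocation / user-agent enrichment block. It could be factored into one
# helper that reuses the same flask `request`, `geocoder`, and user_agents
# `parse` objects the handlers already import.
def enrich_visit(d):
    """Add geo and user-agent fields to a visit dict in place (sketch)."""
    if d.get('public_ip'):
        g = geocoder.ip(d['public_ip'])
        d['lat'], d['lng'] = g.latlng
        d['city'], d['country'], d['state'] = g.city, g.country, g.state
    d['user_agent'] = request.environ.get('HTTP_USER_AGENT')
    if d['user_agent']:
        ua = parse(d['user_agent'])
        d['browser'] = ua.browser.family
        d['is_bot'], d['is_mobile'] = ua.is_bot, ua.is_mobile
        d['is_tablet'], d['is_pc'] = ua.is_tablet, ua.is_pc
    return d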
def parse_args(request):
    """ Parse args from request, return dict. """
    comp_form = dict(zip(request.GET.keys(), request.GET.values()))
    params = {}
    params['browser_type'] = request.META['HTTP_USER_AGENT']
    ip = get_ip(request)
    params['ip_addr'] = ip
    geo = geocoder.ip(ip)
    params['city'] = geo.city
    params['country'] = geo.country
    params['monthly_budget'] = int(comp_form['monthlyBudget'])
    params['discount'] = int(comp_form.get('discount', 0))
    params['list_price'] = int(comp_form['totalPrice'])
    params['extra_price'] = int(comp_form.get('extraPrice', 0))
    params['depreciation_id'] = int(comp_form['depreciationId'])
    params['tax'] = int(comp_form['tax'])
    params['px_amount'] = int(comp_form.get('pxAmount', 0))
    params['deposit_amount'] = int(comp_form.get('depositAmount', 0))
    params['term'] = int(comp_form['term'])
    if comp_form.get('foHP') == 'True':
        hp_data = json.loads(comp_form.get('hp').decode('cp1252'))
        params['hp'] = True
        params['hp_term'] = hp_data.get('term', 0)
        params['hp_loan_rate'] = hp_data['loan_at'] / 100.0
    if comp_form.get('foPCP') == 'True':
        pcp_data = json.loads(comp_form.get('pcp').decode('cp1252'))
        params['pcp'] = True
        params['pcp_term'] = pcp_data.get('term', 0)
        params['pcp_loan_rate'] = pcp_data['loan_at'] / 100.0
        params['pcp_ballon_value'] = pcp_data['ballon_value']
    if comp_form.get('foLease') == 'True':
        lease_data = json.loads(comp_form.get('lease').decode('cp1252'))
        params['lease'] = True
        params['lease_term'] = lease_data.get('term')
        params['lease_extras'] = lease_data.get('extras', 0)
        params['lease_initial_payment'] = lease_data['initial_payment']
        params['lease_monthly_payment'] = lease_data['monthly']
        params['lease_predicted_mileage'] = lease_data.get('actual_annual', 0)
        params['lease_included_mileage'] = lease_data.get('include_mileages', 0)
        params['lease_excess_mile_price'] = lease_data.get('price_per_mile', 0)
    if comp_form.get('foLoan') == 'True':
        loan_data = json.loads(comp_form.get('loan').decode('cp1252'))
        params['loan'] = True
        params['loan_term'] = loan_data.get('term', 0)
        params['loan_loan_rate'] = loan_data['loan_at'] / 100.0
        params['loan_loan_at_end'] = loan_data.get('loan_at_end', 0)
    return params
def get_temperature():
    g = geocoder.ip('me')
    owm = pyowm.OWM('65a34e19f4ad83823846b8fd1a1813fd')
    location = g.city + "," + g.country
    observation = owm.weather_at_place(location)
    w = observation.get_weather()
    temperature = w.get_temperature('celsius')['temp']
    return temperature
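# A hedged sketch only: get_temperature() above appears to use the older
# pyowm 2.x interface (weather_at_place / get_weather / get_temperature).
# Under pyowm 3.x the same lookup would look roughly like this; the API key
# is assumed to be supplied by the caller rather than hard-coded.
def get_temperature_v3(api_key):
    g = geocoder.ip('me')
    owm = pyowm.OWM(api_key)
    observation = owm.weather_manager().weather_at_place("{},{}".format(g.city, g.country))
    return observation.weather.temperature('celsius')['temp']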
def get_context_data(self, **kwargs):
    context = super(LocationMixin, self).get_context_data(**kwargs)
    visitor_ip = self.request.META.get('REMOTE_ADDR', None)
    latlng = {'lat': 37.774929, 'lng': -122.419416}
    if visitor_ip is not None:
        g = geocoder.ip(visitor_ip)
        if g.lat is not None and g.lng is not None:
            latlng.update(lat=g.lat, lng=g.lng)
    context['latlng'] = latlng
    return context
def addPeer(hostport, db):
    ip, port = url_to_ip_port(hostport)
    ipport = ip + ':' + str(port)
    if ip not in ['127.0.0.1', '::1']:
        cursor = db.execute('''SELECT hostname FROM hosts WHERE hostname = ?''', (ipport,))
        if not len(cursor.fetchall()):
            g = geocoder.ip(ip)
            country = pycountry.countries.get(alpha_2=g.country)
            cursor = db.execute('''INSERT INTO hosts(hostname, lat, lng, city, state, country)
                                   VALUES(?,?,?,?,?,?)''',
                                (ipport, g.lat, g.lng, g.city, g.state, country.name))
            db.commit()
def get_location_by_ip(ip):
    location = None
    loc = geocoder.ip(ip)
    if loc and loc.ok:
        location, created = Location.objects.get_or_create(
            location=Point(loc.lng, loc.lat),
            defaults={'address': loc.address})
    else:
        record = gip.record_by_addr(ip)
        if record:
            location, created = Location.objects.get_or_create(
                location=Point(record.get('longitude'), record.get('latitude')),
                defaults={
                    'address': '%s, %s' % (record.get('city'), record.get('country_name'))
                               if record.get('city') else record.get('country_name')
                })
    return location
def ip_to_location(ip):
    """
    Look up geodata for an IP address. Used in the call statistics.

    :param ip: the client's IP address
    :return: geodata for the IP address
    """
    if ip in (None, ''):
        return ''
    import geocoder
    ret = geocoder.ip(ip).address
    # print('ip_to_location: %s -> %s' % (ip, str(ret)))
    return ret
def get_my_ip():
    if request.args.get('ip'):
        ip = request.args['ip']
    else:
        fwd = request.environ.get('HTTP_X_FORWARDED_FOR', None)
        if fwd is None:
            fwd = request.environ.get('REMOTE_ADDR')
        ip = fwd.split(',')[0]
    g = geocoder.ip(ip)
    tz = g.timezone
    offset = int(timezone(tz).localize(datetime.now()).strftime('%z')) / 100
    return jsonify(**{'ip': ip, 'offset': offset, 'tz': tz,
                      'city': g.city, 'country': g.country, 'state': g.state})
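# get_my_ip() above derives the UTC offset with pytz. On Python 3.9+ the same
# value can be computed with the standard-library zoneinfo module; a sketch,
# not taken from the original code:
from datetime import datetime as _datetime
from zoneinfo import ZoneInfo


def utc_offset_hours(tz_name):
    """Return the current UTC offset (in hours) for an IANA timezone name."""
    offset = _datetime.now(ZoneInfo(tz_name)).utcoffset()
    return offset.total_seconds() / 3600 if offset else 0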
def listing_list(request):
    g = geocoder.ip('me')
    lat = g.latlng[0]
    lng = g.latlng[1]
    c = connection.cursor()
    try:
        results = Listings.objects.raw('SELECT * FROM getCloseListings(%s, %s)', [lat, lng])
        # results = c.fetchall()
        print(results)
    finally:
        c.close()
    return render(request, 'listings/listing_list.html', {'listings': results})
def get_geolocation(ipaddress):
    """Check latitude and longitude of the IP."""
    result = {'location': None, 'lat': 0, 'lon': 0}
    try:
        # force disable insecure request warning
        requests.packages.urllib3.disable_warnings()
        match = geocoder.ip(ipaddress)
        result['location'] = '%s, %s' % (match.city, match.country)
        result['lat'] = float(match.lat)
        result['lon'] = float(match.lng)
    except ValueError as error:
        print(error)
        sys.exit(1)
    return result
def calc_my_position_ip(output=MY_POSITION_FILENAME):
    setup_output(output)
    if not config.get('activated', False):
        logger.info('Exiting: not activated')
        return
    logger.info('Waiting %s seconds...', WAIT_BEFORE_QUERY)
    time.sleep(WAIT_BEFORE_QUERY)
    logger.info('Querying the server about my ip...')
    response = geocoder.ip('me')
    logger.info('LatLng: %s', response.latlng)
    logger.info('Place: %s', response.address)
    return calc_my_position_address(response.address, output)
def _get_weather(self, request, force_update, weather_cookie):
    """Get Current Weather Based on IP Address

    Based on the current ip location, get the current weather (or use NY
    if location is not available). Save that data to a cookie to reduce
    # of api calls.

    Arguments:
    request -- A Flask Request
    force_update {bool} -- Should we force update the weather data
    weather_cookie -- Stored weather data
    """
    # Get current weather for location based on IP
    if weather_cookie and not force_update:
        from_cookie = True
        current_weather = json.loads(weather_cookie)
    else:
        from_cookie = False
        # fallback latitude/longitude data
        test_latlngs = {
            'newyork': [40.7081, -73.9571],
            'hawaii': [19.8968, 155.5828],
            'trinidad': [10.65, -61.5167],
            'sweden': [60.1282, 18.6435],
            'australia': [25.2744, 133.7751],
        }
        default_latlng = test_latlngs['newyork']
        # Get the visitor's IP and lat/lng for that IP
        ip = request.headers.get('X-Forwarded-For', request.remote_addr)
        geo = geocoder.ip(ip) if ip != '127.0.0.1' else None
        lat, lng = geo.latlng if geo and len(geo.latlng) == 2 else default_latlng
        # Use Darksky to get the current forecast for that lat/lng
        geo_forecast = forecast(FORECAST_KEY, lat, lng)
        # Get and format the current weather
        daily_weather = geo_forecast['daily']['data'][0]
        current_weather = geo_forecast['currently']
        current_weather['timezone'] = geo_forecast['timezone']
        current_weather['units'] = geo_forecast['flags']['units']  # F or C
        current_weather['sunriseTime'] = daily_weather['sunriseTime']
        current_weather['sunsetTime'] = daily_weather['sunsetTime']
        current_weather['ip'] = ip
        current_weather['lat_lng'] = [lat, lng]
    return {'current': current_weather, 'from_cookie': from_cookie}
def get_weather(location):
    if location is None:
        g = geocoder.ip('me')
    else:
        g = geocoder.google(location)
    url = "https://api.darksky.net/forecast/{}/{},{}?exclude=minutely,hourly,alerts,flags?UNITS={}".format(
        DARKSKY_API_KEY, g.lat, g.lng, UNITS)
    response = requests.get(url).json()
    table_week = [["", "", "HI", "at", "LO", "at", "", ""]]
    for day in response['daily']['data']:
        date = datetime.datetime.fromtimestamp(int(day['time'])).strftime('%a %b %-d')
        summary = day['summary']
        max_temp = temp_format(colors.RED, day['temperatureMax'])
        max_temp_time = readable_time(day['temperatureMaxTime'])
        min_temp = temp_format(colors.BLUE, day['temperatureMin'])
        min_temp_time = readable_time(day['temperatureMinTime'])
        percip_chance = str(int(day['precipProbability'] * 100)) + "%"
        try:
            percip_type = day['precipType'].title()
        except KeyError:
            percip_type = ""
        table_day = [date, summary, max_temp, max_temp_time,
                     min_temp, min_temp_time, percip_chance, percip_type]
        table_week.append(table_day)
    table_week[1][0] = colors.BOLD + colors.GREEN + "* Today *" + colors.ENDC
    title = colors.BOLD + " {}, {} - Current Temp: {} ".format(
        g.city, g.state,
        (str(int(response['currently']['temperature'])) + UNIT_LETTER)) + colors.ENDC
    # AsciiTable
    table_instance = AsciiTable(table_week, title)
    table_instance.justify_columns[2] = 'right'
    # Output
    print("\n" + colors.BOLD + response['daily']['summary'] + colors.ENDC + "\n")
    print(table_instance.table)
def page_not_found(error):
    with requests.Session() as session:
        requester_ip = request.access_route[0]
        if requester_ip == '127.0.0.1':
            place, latlng, elev = 'nc', [35.6921, -80.4357], 218.2
            address: str = u'On Library Park: 35\N{DEGREE SIGN} 41\' 31.9\"N 80\N{DEGREE SIGN} 26\' 8.67\"W'
        else:
            place, latlng = 'geocode', geocoder.ip(requester_ip, key=GOOGLE_API_KEY).latlng
        requester_geocode = geocoder.google(latlng, key=GOOGLE_API_KEY, method='reverse', session=session)
        # Use the defined address, implies we are using a static location, if undefined make it the geocoded one.
        try:
            address
        except NameError:
            address = str(requester_geocode.address)
        # Use the defined elevation, implies we are using a static location, if undefined make it the geocoded one.
        try:
            requester_geocode.elevation = lambda: None
            requester_geocode.elevation.meters = lambda: None
            setattr(requester_geocode.elevation, 'meters', elev)
        except NameError:
            requester_geocode.elevation = geocoder.elevation(latlng, key=GOOGLE_API_KEY, session=session)
        # Get the timezone
        requester_geocode.timeZoneId = geocoder.timezone(latlng, key=GOOGLE_API_KEY, session=session).timeZoneId
        return render_template('404.html', place=place, address=address, latlng=latlng,
                               elevation=requester_geocode.elevation.meters, ip=requester_ip), 404
def showMaps(request):
    g = geocoder.ip('me')
    map = folium.Map(location=g.latlng, zoom_start=15)
    maps = map._repr_html_()
    return render(request, 'home.html', {'map': maps})
import os
import sys
import json
import mysql.connector
import geocoder
import googlemaps

g = geocoder.ip('me').latlng  # "57.8765","1.98765"


def hos(co):  # `co` added: the (lat, lng) pair used below was otherwise undefined
    places = gmaps.places_nearby(location=(co[0], co[1]), rank_by='distance',
                                 type='hospital', name='Hospital', language='en')
    print(places['results'][0]['plus_code']['global_code'])
    hospital = []
    for i in range(len(places['results'])):
        t = places['results'][i]['name']
        if 'hospital' in t.lower():
            hospital.append([t, places['results'][i]['id']])
    return hospital


def pol(co):
    places = gmaps.places_nearby(location=(co[0], co[1]), rank_by='distance',
                                 type='police', name='police',
                        float(vals[i][12]),
                        c=colorpicker(int(vals[i][10])), s=30, zorder=2, alpha=1)
            plt.annotate(vals[i][3] + " " + vals[i][2] + "\n" + "Latency:" + vals[i][10] + "ms",
                         (float(vals[i][11]), float(vals[i][12])), size=6)
        except:
            pass
        df_csv._set_value(i, 'response_time', vals[i][10])
        df_csv._set_value(i, 'longitude', vals[i][11])
        df_csv._set_value(i, 'latitude', vals[i][12])

self_loc = geocoder.ip('me')
map.scatter(self_loc.lng, self_loc.lat, c='blue', s=90, zorder=3, alpha=1)
plt.annotate("I am here!", (self_loc.lng, self_loc.lat), size=7)
df_csv.to_csv('nameservers.csv', index=False)
print('That took {} seconds...'.format(time.time() - starttime))
print("IP Data inserted successfully into table...")
print("Plotting Map...")
plt.show()
print("Map Closed...")

# Created by : Vrishab V Srivatsa
# Credits : Geocoder API, https://public-dns.info
# Inspired by a Cloudflare internship application task
# Please credit if used for your personal/academic uses
# Twitter: @vsrivatsa25 https://twitter.com/vsrivatsa25?lang=da
# Instagram: @vsrivatsa25 https://www.instagram.com/vsrivatsa25/?hl=en
def get_current_location():
    import geocoder
    g = geocoder.ip("me")
    return g.y, g.x
def current_loc(location):
    g = geocoder.ip('me')
    latlng = g.latlng
    lat = str(latlng[0])
    longi = str(latlng[1])
    return lat, longi
def getCenter(self):
    data = geocoder.ip('me').latlng
    return data  # assumed: return the looked-up [lat, lng] pair
def get_address():
    g = geocoder.ip('me')
    return g.address
def get_latlng():
    g = geocoder.ip('me')
    new = str(g.latlng[0]) + " " + str(g.latlng[1])
    return new
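# Several of the helpers above index g.latlng[0] / g.latlng[1] directly.
# geocoder.ip('me') returns an empty latlng when the lookup fails (for
# example with no network), so a defensive variant might look like this
# sketch; the fallback coordinates are arbitrary placeholders, not taken
# from the original code.
def get_latlng_safe(default=(0.0, 0.0)):
    g = geocoder.ip('me')
    if g.ok and g.latlng:
        return tuple(g.latlng)
    return default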
def insert(): error = 'tracked visit. Nothing more to see here' status = 'success' try: d = {} d['private_ip'] = request.environ.get('REMOTE_ADDR') d['public_ip'] = request.environ.get('HTTP_X_FORWARDED_FOR') d['full_url'] = request.environ.get('HTTP_REFERER', '').strip().lower() if 'appid' in request.form: if 'event' in request.form: d['visit_id'] = db.session.query(models.Visit).filter_by(full_url=d['full_url'], public_ip=d['public_ip'], private_ip=d['private_ip']).order_by('-id').first().id d['event_type'] = request.form['event_type'].lower() d['element_id'] = request.form['element_id'].lower() d['element_type'] = request.form['element_type'].lower() d['element_tag'] = request.form['element_tag'].lower() if 'public_ip' in d: del d['public_ip'] if 'private_ip' in d: del d['private_ip'] if 'full_url' in d: del d['full_url'] e = models.Event(**d) e.date = datetime.now() db.session.add(e) db.session.commit() return jsonify(**{'status':'success', 'description':'event recorded'}) app = modules.getModel(models.App, appid = request.form['appid']) if not app: return jsonify(**{'status':'failure', 'description':'no app found with that id'}) if app.website.base not in d['full_url']: return jsonify(**{'status':'failure', 'description':'app is for a different website'}) ur = d['full_url'].replace('https://','').replace('http://','').replace('www.','').lower().strip() if '/' not in ur: ur += '/' base, d['after'] = ur[:ur.index('/')], ur[ur.index('/')+1:] d['website_id'] = app.website.id if len(d['after']) <= 1: d['after'] = None elif d['after'][-1] == '/': d['after'] = d['after'][:-1] if '?' in ur: if d['after']: d['after'] = d['after'].split('?')[0] d['gets'] = ur.split('?')[1] if len(d['gets']) <= 1: d['gets'] = None d['secure'] = 'https://' in d['full_url'] else: return jsonify(**{'status':'failure', 'description':'no recognized action taken'}) if d['public_ip']: g = geocoder.ip(d['public_ip']) d['lat'], d['lng'] = g.latlng d['city'] = g.city d['country'] = g.country d['state'] = g.state d['user_agent'] = request.environ.get('HTTP_USER_AGENT') if d['user_agent']: user_agent = parse(d['user_agent']) d['browser'] = user_agent.browser.family d['is_bot'], d['is_mobile'], d['is_tablet'], d['is_pc'] = user_agent.is_bot, user_agent.is_mobile, user_agent.is_tablet, user_agent.is_pc p = models.Visit(**d) p.date = datetime.now() db.session.add(p) db.session.commit() except Exception as e: error = repr(e) status = 'failure' return jsonify(**{'status':status, 'description':error})
def find_lng(x, y, z):
    return math.atan(z / math.sqrt(x + y))


# ------------------------------------------------------------------- #
# Global variables
planets = ['ISS', 'Mercury', 'Venus', 'Earth', 'Mars', 'Jupiter', 'Saturn',
           'Neptune', 'Uranus', 'Pluto', 'Sun']
my_coord = [0, 0, 0]
obj_coord = [0, 0, 0]
obj = ''
message = ''
earthradius = 6371

# Gets my coordinates
me = geocoder.ip('me')

# Get date and time of request and format it to print
timeNow = strftime("%Y/%m/%d/%H/%M/%S").split('/', 5)
date = 'Date: ' + timeNow[2] + '/' + timeNow[1] + '/' + timeNow[0] + ' - ' + timeNow[3] + ':' + timeNow[4] + ':' + timeNow[5]
print(date)

# ------------------------------------------------------------------- #
# ----------------------- Set server -------------------------------- #
# Your IP address
HOST = '192.168.1.69'
PORT = 8080
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind((HOST, PORT))
def print_ephemeris(): # set the location to report for with requests.Session() as session: requester_ip = request.access_route[0] if str(request.path) == '/nc' or str(request.path) == '/erikshus': place, latlng, elev = 'nc', [35.6921, -80.4357], 218.2 address: str = u'On Library Park: 35\N{DEGREE SIGN} 41\' 32\"N 80\N{DEGREE SIGN} 26\' 9\"W' elif str(request.path) == '/gammelhus': place, latlng, elev = 'gammelhus', [42.1064, -76.2624], 248.7168 address: str = u'Under the streetlamp: 42\N{DEGREE SIGN} 06\' 23\"N 76\N{DEGREE SIGN} 15\' 45\"W' elif str(request.path) == '/kopernik': place, latlng, elev = 'kopernik', [42.0020, -76.0334], 528 address: str = u'Kopernik Observatory: 42\N{DEGREE SIGN} 0\' 7\"N 76\N{DEGREE SIGN} 2\' 0\"W' elif str(request.path) == '/deetop': place, latlng, elev = 'deetop', [41.9700, -75.6700], 284 address: str = u'Dee-Top Observatory: 41\N{DEGREE SIGN} 58\' 12\"N 75\N{DEGREE SIGN} 40\' 12\"W' elif str(request.path) == '/stjohns': place, latlng, elev = 'stjohns', [47.5675, -52.7072], 83 address: str = u'St. John\'s: 47\N{DEGREE SIGN} 34\' 3\"N 52\N{DEGREE SIGN} 42\' 26\"W' elif str(request.path) == '/greenwich': place, latlng, elev = 'greenwich', [51.4768, -0.0005], 47.1526 address: str = u'Greenwich Observatory: 51\N{DEGREE SIGN} 28\' 38\"N 0\N{DEGREE SIGN} 0\' 0\"' else: if requester_ip == '127.0.0.1': place, latlng, elev = 'nc', [35.6921, -80.4357], 218.2 address: str = u'On Library Park: 35\N{DEGREE SIGN} 41\' 32\"N 80\N{DEGREE SIGN} 26\' 9\"W' else: place, latlng = 'geocode', geocoder.ip(requester_ip, key=GOOGLE_API_KEY, session=session).latlng # Start with a discovered geocode. requester_geocode = geocoder.google(latlng, method='reverse', key=GOOGLE_API_KEY, session=session) # Use the defined address, implies we are using a static location, if undefined make it the geocoded one. try: address except NameError: address = str(requester_geocode.address) # Use the defined elevation, implies we are using a static location, if undefined make it the geocoded one. try: requester_geocode.elevation = lambda: None requester_geocode.elevation.meters = lambda: None setattr(requester_geocode.elevation, 'meters', elev) except NameError: requester_geocode.elevation = geocoder.elevation(latlng, key=GOOGLE_API_KEY, session=session) # Get the timezone requester_geocode.timeZoneId = geocoder.timezone(latlng, key=GOOGLE_API_KEY, session=session).timeZoneId # noinspection PyPep8 return render_template('print_times.html', place=place, sunset_string=twilight('sunset', requester_geocode), sunrise_string=twilight('sunrise', requester_geocode), civil_end_string=twilight('civil_end', requester_geocode), civil_begin_string=twilight('civil_begin', requester_geocode), nautical_end_string=twilight('nautical_end', requester_geocode), nautical_begin_string=twilight('nautical_begin', requester_geocode), amateur_end_string=twilight('amateur_end', requester_geocode), amateur_begin_string=twilight('amateur_begin', requester_geocode), astro_end_string=twilight('astronomical_end', requester_geocode), astro_begin_string=twilight('astronomical_begin', requester_geocode), moonrise_string=twilight('moonrise', requester_geocode), moonset_string=twilight('moonset', requester_geocode), moon_phase_string=twilight('moon_phase', requester_geocode), moonset_ante_astro_noon_p=twilight('moonset_ante_astro_noon_p', requester_geocode), address=address, latlng=latlng, elevation=requester_geocode.elevation.meters, ip=requester_ip)
def conversation(matched_index): global user user = db_inf['name'][matched_index] if matched_index == 0: print('welcome admin,how can i help you') say('welcome admin,how can i help you') else: print('welcome to personel jarvis, how can i help you') say('welcome to personel jarvis, how can i help you') while True: word = speech_recognizer() print('Jarvis :recognized word :', word) log_(word, False) res = '' if 'google' in word or 'browser' in word or 'youtube' in word or 'tell me' in word or 'what' in word or 'recomended' in word or 'search' in word or 'website' in word: browser(word) word = mouse_() elif 'stop song' in word or 'play' in word or 'paly song' in word: mouse_(word) elif 'who are you' in word or 'give your intro' in word: res = "i'm a personel jarvis of pankaj.i can also help you by providing some imformation if you want" print('Jarvis :' + res) say(res) elif 'hello' in word or 'hi' in word: res = 'hii!...how can i help you' print('jarvis :' + res) say(res) elif 'manufacturer' in word: res = 'pankaj kumar,he made me durring summer training in techienest,jaipur by saurabh sir.at that time he was persuing B.tech from nit jalandhar' print('Jarvis :' + res) say(res) elif 'how are you' in word: res = 'fine and i am not intrested in know your f*****g mood,want any help than stay otherwise get out from here' print('Jarvis :' + res) say(res) elif "wikipedia" in word: Wikipedia() elif "today's date" in word or 'date' in word: d = str(date.today()) res = "today's date is :" + d print('Jarvis :' + res) say(res) elif 'time' in word or 'current time' in word: time = t.ctime() time = str(time.split(' ')[4]) res = 'current time :' + time print('Jarvis :' + res) say('current time is' + str(time.split(':')[0:2])) elif "weather" in word or "temperature" in word: say("Tell your city") log_("Tell your city") city_name = speech_recognizer() print("city you said is", city_name) #city_name=input("enter city name to confirm") api_key = "cca979ed5fb2c8d3a9c99594191482f9" base_url = "http://api.openweathermap.org/data/2.5/weather?" complete_url = base_url + "appid=" + api_key + "&q=" + city_name json_data = requests.get(complete_url).json() try: temp = json_data['main'] temp = str(int(int(temp['temp']) - 273.15)) temp1 = json_data['weather'][0]['description'] d = " Current Temperature in " + city_name + " is " + temp + " degree celsius with " + temp1 print("Jarvis : ", d) say(d) log_(d) except KeyError: print("Key invalid or city not found") elif "location" in word: g = geocoder.ip('me') lat = g.latlng str1 = "latitude position is " + str(lat[0]) str2 = "longitude position is " + str(lat[1]) print("Jarvis: ", str1) print("Jarvis: ", str2) d = str1 + str2 log_(d) say(str1) say(str2) elif 'bye' in word or 'bye jarvis' in word: log_(word) break log_(res) if matched_index == 0: if 'open vlc' in word or 'play song' in word: #print('processing...') #say('processing...') global player player = vlc_player(word) if 'next song' in word or 'pause' in word or 'play' in word or 'stop song' in word or 'close' in word: try: player = vlc_player(word, player) except: pass
import geocoder

myloc = geocoder.ip('me')
print(myloc.city)
def GetLatLng(self, ip_address=None):
    if ip_address:
        return geocoder.ip(ip_address).latlng
    else:
        return geocoder.ip('me').latlng
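# Illustrative use of GetLatLng(); the class name below is hypothetical,
# since the snippet above only shows the method.
# locator = IpLocator()
# locator.GetLatLng('8.8.8.8')   # -> [lat, lng] for that address
# locator.GetLatLng()            # falls back to the caller's own IP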
import geocoder
import requests
import gui
import os
import PyQt5.QtWidgets  # used below for QApplication; missing from the original imports
from dotenv import load_dotenv

load_dotenv()


# %% Call openWeatherMap's onecall api
# exclude should be a string with each section separated by a comma (no spaces)
def oneCallRequest(apiKey: str, latlng, exclude):
    data = requests.get(
        f"http://api.openweathermap.org/data/2.5/onecall?lat={latlng[0]}&lon={latlng[1]}"
        f"&exclude={exclude}&units=metric&appid={apiKey}").json()
    return data


# %% Get data
data = oneCallRequest(os.environ.get("WEATHER_APIKEY"), geocoder.ip("me").latlng, "minutely,alerts")

# %% Start GUI
app = PyQt5.QtWidgets.QApplication([])
app.setStyleSheet(open("./Resources/stylesheets/WeatherApp.css").read())
main = gui.WeatherApp()  # initializes everything in the window
main.setWeatherIcon(data["current"]["weather"][0]["icon"])
main.setWeatherName(data["current"]["weather"][0]["description"])
main.setTemperature(data["current"]["temp"], data["current"]["feels_like"])
for i in range(len(data["hourly"])):
    item = data["hourly"][i]
    main.addHOElement(gui.hourlyOverview(main, item["weather"][0]["icon"], item["pop"],
                                         item["dt"], item["temp"], item["feels_like"]))
for i in range(len(data["daily"])):
def get_altitude():
    g = geocoder.ip('me')
    gmaps = googlemaps.Client(key='AIzaSyCZXHznz1W3hZSbWqHZXtj6T1euVxyBitk')
    altitudes = gmaps.elevation((g.latlng[0], g.latlng[1]))
    final_altitude = altitudes[0]['elevation']
    return final_altitude
def getLocation(self):
    '''
    Finds latitude and longitude given user's IP address.
    '''
    return geocoder.ip('me').latlng
def get_ip_coordinates(ip):
    g = geocoder.ip(ip)
    return g.lat, g.lng
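# IP lookups such as get_ip_coordinates() hit an external service, so
# repeated calls for the same address can be memoised. A minimal sketch
# using only the standard library; the cache size is an arbitrary choice.
from functools import lru_cache


@lru_cache(maxsize=256)
def cached_ip_coordinates(ip):
    g = geocoder.ip(ip)
    return g.lat, g.lng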
def __next__(self):
    mylocation = geocoder.ip('me')
    latlong = mylocation.latlng
    omw = pyowm.OWM('1bfbe76e150ede97d065c9304b1958e0')
    return omw.weather_manager().weather_at_coords(latlong[0], latlong[-1])
def latitudelocation():
    g = geocoder.ip('me')
    lat = float(g.lat)
    return lat
#!/usr/bin/env python
import geocoder

g = geocoder.ip('111.65.248.132')
print(g.city)
def zipcodelocation():
    g = geocoder.ip('me')
    zp = str(g.postal)
    return zp
def get_lat_long():
    g = geocoder.ip('me')
    print(g.latlng)
import time
import re
from nltk.corpus import stopwords
from nltk.stem import WordNetLemmatizer
from textblob import TextBlob
import geocoder
import warnings     # used below; presumably imported earlier in the original module
import googlemaps   # used below for googlemaps.Client; presumably imported earlier as well

warnings.filterwarnings('ignore')

global unique_name
unique_name = set()

API_KEY = 'AIzaSyC3O-BdZRrBuOmC_nCvWcbnCdmxEWTztLg'
gmaps = googlemaps.Client(key=API_KEY)

global stop_words
stop_words = set(stopwords.words('english'))

file = 'Data/osm/amenities-vancouver.json.gz'

location = geocoder.ip('me')
loc = location.latlng
# loc = [49.282761666666666, -123.12364166666666]

cuisine_style = [
    'acadian', 'afghan', 'american', 'arab', 'brazilian', 'buddhist', 'burmese',
    'cambodian', 'caribbean', 'chinese', 'cuban', 'czech', 'dutch', 'ethiopian',
    'filipino', 'french', 'german', 'greek', 'hong kong', 'indian', 'indonesian',
    'irish', 'italian', 'jamaican', 'japanese', 'korean', 'lebanese', 'malaysian',
    'malaysian', 'mediterranean', 'mexican', 'mexican', 'mongolian', 'moroccan',
    'persian', 'peruvian', 'portuguese', 'singaporean', 'taiwanese', 'thai',
    'turkish', 'ukranian', 'vietnamese', 'west_coast'
]
def send_anonymous_stats(start_time, debug=False):
    """
    Send anonymous usage statistics

    Example use:
        current_stat = return_stat_file_dict(csv_file)
        add_update_csv(csv_file, 'stat', current_stat['stat'] + 5)
    """
    if debug:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    try:
        client = InfluxDBClient(STATS_HOST, STATS_PORT, STATS_USER,
                                STATS_PASSWORD, STATS_DATABASE)

        # Prepare stats before sending
        uptime = (time.time() - start_time) / 86400.0  # Days
        add_update_csv(STATS_CSV, 'uptime', uptime)

        version_num = db_retrieve_table_daemon(AlembicVersion, entry='first')
        version_send = version_num.version_num if version_num else 'None'
        add_update_csv(STATS_CSV, 'alembic_version', version_send)

        outputs = db_retrieve_table_daemon(Output)
        add_update_csv(STATS_CSV, 'num_relays', outputs.count())

        inputs = db_retrieve_table_daemon(Input)
        add_update_csv(STATS_CSV, 'num_sensors', inputs.count())
        add_update_csv(STATS_CSV, 'num_sensors_active',
                       inputs.filter(Input.is_activated.is_(True)).count())

        conditionals = db_retrieve_table_daemon(Conditional)
        add_update_csv(STATS_CSV, 'num_conditionals', conditionals.count())
        add_update_csv(STATS_CSV, 'num_conditionals_active',
                       conditionals.filter(Conditional.is_activated.is_(True)).count())

        pids = db_retrieve_table_daemon(PID)
        add_update_csv(STATS_CSV, 'num_pids', pids.count())
        add_update_csv(STATS_CSV, 'num_pids_active',
                       pids.filter(PID.is_activated.is_(True)).count())

        triggers = db_retrieve_table_daemon(Trigger)
        add_update_csv(STATS_CSV, 'num_triggers', triggers.count())
        add_update_csv(STATS_CSV, 'num_triggers_active',
                       triggers.filter(Trigger.is_activated.is_(True)).count())

        functions = db_retrieve_table_daemon(CustomController)
        add_update_csv(STATS_CSV, 'num_functions', functions.count())
        add_update_csv(STATS_CSV, 'num_functions_active',
                       functions.filter(CustomController.is_activated.is_(True)).count())

        actions = db_retrieve_table_daemon(Actions)
        add_update_csv(STATS_CSV, 'num_actions', actions.count())

        methods = db_retrieve_table_daemon(Method)
        add_update_csv(STATS_CSV, 'num_methods', methods.count())
        add_update_csv(STATS_CSV, 'num_methods_in_pid',
                       pids.filter(PID.setpoint_tracking_type == 'method').count())
        add_update_csv(STATS_CSV, 'num_setpoint_meas_in_pid',
                       pids.filter(PID.setpoint_tracking_type == 'input-math').count())

        country = geocoder.ip('me').country
        if not country:
            country = 'None'
        add_update_csv(STATS_CSV, 'country', country)
        add_update_csv(STATS_CSV, 'ram_use_mb',
                       resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / float(1000))
        add_update_csv(STATS_CSV, 'Mycodo_revision', MYCODO_VERSION)
        add_update_csv(STATS_CSV, 'master_branch',
                       int(os.path.exists(os.path.join(INSTALL_DIRECTORY, '.master'))))

        # Combine stats into list of dictionaries
        new_stats_dict = return_stat_file_dict(STATS_CSV)
        formatted_stat_dict = []
        for each_key, each_value in new_stats_dict.items():
            if each_key != 'stat':  # Do not send header row
                formatted_stat_dict = add_stat_dict(formatted_stat_dict,
                                                    new_stats_dict['id'],
                                                    each_key,
                                                    each_value)

        # Send stats to secure, remote influxdb server (only write permission)
        client.write_points(formatted_stat_dict)
        logger.debug("Sent anonymous usage statistics")
        return 0
    except requests.ConnectionError:
        logger.debug("Could not send anonymous usage statistics: Connection "
                     "timed out (expected if there's no internet or the "
                     "server is down)")
    except InfluxDBServerError as except_msg:
        logger.error("Statistics: InfluxDB server error: {}".format(
            except_msg['error']))
    except Exception as except_msg:
        logger.exception(
            "Could not send anonymous usage statistics: {err}".format(
                err=except_msg))
    return 1
def get_location_values(my_ip):
    g = geocoder.ip(str(my_ip))
    return [g.lat, g.lng, g.city, g.state, g.country]
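# Example use of get_location_values(); the IP shown is a documentation
# placeholder (TEST-NET-3), not an address from the original code.
# lat, lng, city, state, country = get_location_values('203.0.113.10')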
def location():
    g = geocoder.ip('me')
    return "{}, {}".format(g.latlng[0], g.latlng[1])
# -*- coding: utf-8 -*-
import geocoder
import json
import requests

# forecast.io api key:
f = open('/home/christoph/.config/forecast-io-key')
key = f.readline().rstrip()
f.close()

# Get current longitude and latitude based on IP address:
loc = geocoder.ip(requests.get('http://ipinfo.io/ip').text.rstrip())
city = loc.locality

# Get weather info:
root_url = 'https://api.forecast.io/forecast/'
weather = requests.get(root_url + key + '/' + str(loc.lat) + ',' + str(loc.lng))
weather = json.loads(weather.text)

# Convert temperature to Celsius:
temperature = round((weather['currently']['temperature'] - 32) * 5 / 9)

# Get current weather condition:
condition = weather['currently']['summary']

# Translate city into Irish:
cdict = dict({'Boston': {'irish': 'Bostún'}})
try:
    cathair = cdict[city]['irish']
except KeyError:
def send_anonymous_stats(start_time): """ Send anonymous usage statistics Example use: current_stat = return_stat_file_dict(csv_file) add_update_csv(csv_file, 'stat', current_stat['stat'] + 5) """ try: client = InfluxDBClient(STATS_HOST, STATS_PORT, STATS_USER, STATS_PASSWORD, STATS_DATABASE) # Prepare stats before sending uptime = (time.time() - start_time) / 86400.0 # Days add_update_csv(STATS_CSV, 'uptime', uptime) version_num = db_retrieve_table_daemon(AlembicVersion, entry='first') version_send = version_num.version_num if version_num else 'None' add_update_csv(STATS_CSV, 'alembic_version', version_send) outputs = db_retrieve_table_daemon(Output) add_update_csv(STATS_CSV, 'num_relays', get_count(outputs)) inputs = db_retrieve_table_daemon(Input) add_update_csv(STATS_CSV, 'num_sensors', get_count(inputs)) add_update_csv(STATS_CSV, 'num_sensors_active', get_count(inputs.filter(Input.is_activated == True))) conditionals = db_retrieve_table_daemon(Conditional) add_update_csv(STATS_CSV, 'num_conditionals', get_count(conditionals)) add_update_csv( STATS_CSV, 'num_conditionals_active', get_count(conditionals.filter(Conditional.is_activated == True))) pids = db_retrieve_table_daemon(PID) add_update_csv(STATS_CSV, 'num_pids', get_count(pids)) add_update_csv(STATS_CSV, 'num_pids_active', get_count(pids.filter(PID.is_activated == True))) lcds = db_retrieve_table_daemon(LCD) add_update_csv(STATS_CSV, 'num_lcds', get_count(lcds)) add_update_csv(STATS_CSV, 'num_lcds_active', get_count(lcds.filter(LCD.is_activated == True))) math = db_retrieve_table_daemon(Math) add_update_csv(STATS_CSV, 'num_maths', get_count(math)) add_update_csv(STATS_CSV, 'num_maths_active', get_count(math.filter(Math.is_activated == True))) methods = db_retrieve_table_daemon(Method) add_update_csv(STATS_CSV, 'num_methods', get_count(methods)) add_update_csv(STATS_CSV, 'num_methods_in_pid', get_count(pids.filter(PID.method_id != ''))) timers = db_retrieve_table_daemon(Timer) add_update_csv(STATS_CSV, 'num_timers', get_count(timers)) add_update_csv(STATS_CSV, 'num_timers_active', get_count(timers.filter(Timer.is_activated == True))) country = geocoder.ip('me').country if not country: country = 'None' add_update_csv(STATS_CSV, 'country', country) add_update_csv( STATS_CSV, 'ram_use_mb', resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / float(1000)) add_update_csv(STATS_CSV, 'Mycodo_revision', MYCODO_VERSION) # Combine stats into list of dictionaries new_stats_dict = return_stat_file_dict(STATS_CSV) formatted_stat_dict = [] for each_key, each_value in new_stats_dict.items(): if each_key != 'stat': # Do not send header row formatted_stat_dict = add_stat_dict(formatted_stat_dict, new_stats_dict['id'], each_key, each_value) # Send stats to secure, remote influxdb server (only write permission) client.write_points(formatted_stat_dict) logger.debug("Sent anonymous usage statistics") return 0 except requests.ConnectionError: logger.debug("Could not send anonymous usage statistics: Connection " "timed out (expected if there's no internet or the " "server is down)") except Exception as except_msg: logger.exception( "Could not send anonymous usage statistics: {err}".format( err=except_msg)) return 1
def send_anonymous_stats(start_time): """ Send anonymous usage statistics Example use: current_stat = return_stat_file_dict(csv_file) add_update_csv(csv_file, 'stat', current_stat['stat'] + 5) """ try: client = InfluxDBClient(STATS_HOST, STATS_PORT, STATS_USER, STATS_PASSWORD, STATS_DATABASE) # Prepare stats before sending uptime = (time.time() - start_time) / 86400.0 # Days add_update_csv(STATS_CSV, 'uptime', uptime) version_num = db_retrieve_table_daemon( AlembicVersion, entry='first') version_send = version_num.version_num if version_num else 'None' add_update_csv(STATS_CSV, 'alembic_version', version_send) outputs = db_retrieve_table_daemon(Output) add_update_csv(STATS_CSV, 'num_relays', get_count(outputs)) inputs = db_retrieve_table_daemon(Input) add_update_csv(STATS_CSV, 'num_sensors', get_count(inputs)) add_update_csv(STATS_CSV, 'num_sensors_active', get_count(inputs.filter(Input.is_activated == True))) conditionals = db_retrieve_table_daemon(Conditional) add_update_csv(STATS_CSV, 'num_conditionals', get_count(conditionals)) add_update_csv(STATS_CSV, 'num_conditionals_active', get_count(conditionals.filter(Conditional.is_activated == True))) pids = db_retrieve_table_daemon(PID) add_update_csv(STATS_CSV, 'num_pids', get_count(pids)) add_update_csv(STATS_CSV, 'num_pids_active', get_count(pids.filter(PID.is_activated == True))) lcds = db_retrieve_table_daemon(LCD) add_update_csv(STATS_CSV, 'num_lcds', get_count(lcds)) add_update_csv(STATS_CSV, 'num_lcds_active', get_count(lcds.filter(LCD.is_activated == True))) math = db_retrieve_table_daemon(Math) add_update_csv(STATS_CSV, 'num_maths', get_count(math)) add_update_csv(STATS_CSV, 'num_maths_active', get_count(math.filter(Math.is_activated == True))) methods = db_retrieve_table_daemon(Method) add_update_csv(STATS_CSV, 'num_methods', get_count(methods)) add_update_csv(STATS_CSV, 'num_methods_in_pid', get_count(pids.filter(PID.method_id != ''))) country = geocoder.ip('me').country if not country: country = 'None' add_update_csv(STATS_CSV, 'country', country) add_update_csv(STATS_CSV, 'ram_use_mb', resource.getrusage( resource.RUSAGE_SELF).ru_maxrss / float(1000)) add_update_csv(STATS_CSV, 'Mycodo_revision', MYCODO_VERSION) # Combine stats into list of dictionaries new_stats_dict = return_stat_file_dict(STATS_CSV) formatted_stat_dict = [] for each_key, each_value in new_stats_dict.items(): if each_key != 'stat': # Do not send header row formatted_stat_dict = add_stat_dict(formatted_stat_dict, new_stats_dict['id'], each_key, each_value) # Send stats to secure, remote influxdb server (only write permission) client.write_points(formatted_stat_dict) logger.debug("Sent anonymous usage statistics") return 0 except requests.ConnectionError: logger.debug("Could not send anonymous usage statistics: Connection " "timed out (expected if there's no internet or the " "server is down)") except Exception as except_msg: logger.exception( "Could not send anonymous usage statistics: {err}".format( err=except_msg)) return 1
def getLocation():
    g = geocoder.ip('me')
    return str(g.latlng[0]) + "," + str(g.latlng[1]) + ";"
def test_ip():
    g = geocoder.ip(ip)  # `ip` is expected to be defined at module scope in the test suite
    assert g.ok
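# test_ip() above relies on an `ip` value defined elsewhere in its module.
# A self-contained variant might pin a known public address instead; whether
# the lookup succeeds still depends on the geocoding service being reachable.
def test_ip_google_dns():
    g = geocoder.ip('8.8.8.8')
    assert g.ok
    assert g.latlng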
def longitudelocation():
    g = geocoder.ip('me')
    lng = float(g.lng)
    return lng
    '61.129.7.20', '61.147.122.129', '61.147.234.78', '61.147.234.79',
    '61.155.170.129', '61.155.201.38', '61.155.201.96', '74.208.105.171'
]

with open('webPage/server_map.json') as file:
    geoip = json.load(file)
with open('/usr/local/mad/report/server_map.json', 'w') as file:
    json.dump(geoip, file)

resip = list()
for count, ip in enumerate(ipset[166:]):
    if ipaddress.ip_address(ip).is_private:
        print(count + 167, ip, 'private address')
        continue
    latlng = geocoder.ip(ip).latlng
    # try:
    #     r = requests.get(f'http://ipinfo.io/{ip}?token={TOKEN}')
    #     j = r.json()['loc']
    #     l = j.split(',')
    #     latlng = (float(l[0]), float(l[1]))
    # except requests.exceptions.ConnectionError:
    #     latlng = None
    print(count + 167, ip, latlng)  ###
    if latlng:
        geoip.append(dict(
            name=ip,
            latLng=latlng,
        ))
    if latlng is None:
        resip.append((ip, 0))
def get_location(gmaps): ''' Ask user to either input or speak their adress and convert it into lat,lng''' while True: print("Do you want to input your address by:") print("\t1. Speech, or") print("\t2. Typing, or") print("\t3. Auto detect.") time.sleep(1.4) choice = input("Your choice (1, 2 or 3): ") if choice != '' and choice.isdigit() and (int(choice) in [1,2,3]): choice = int(choice) break else: print("\nInput not recognized, please re-enter.") logging.debug(f"User choice: {choice}") if choice == 1: userAddress = recognize_speech() if userAddress != None: while True: print("Detected Address:") print(userAddress) answer = input("Is this your address? (y/n)") if check_answer(answer): break elif not check_answer(answer): answer = input("Retry? (y/n): ") if check_answer(answer): userAddress = recognize_speech() else: while True: userAddress = input("Enter your Adress: ") if userAddress != '': break print(f"Address: {userAddress}") else: while True: userAddress = input("Enter your Adress: ") if userAddress != '': break elif choice == 2: while True: userAddress = input("Enter your Adress: ") if userAddress != '': break elif choice == 3: g = geocoder.ip('me') userLocation = str(g.latlng[0]) + ',' + str(g.latlng[1]) temp = gmaps.reverse_geocode(g.latlng) userAddress = temp[0]['formatted_address'] logging.debug(f"Detected user address: {userAddress}, attitude: {userLocation}") print(f"We have detected your address: {userAddress}") userInput = input("Is this correct? (y/n):") if check_answer(userInput): return userLocation, userAddress else: while True: userAddress = input("Enter your Adress: ") if userAddress != '': break geocodeResult = gmaps.geocode(userAddress) if len(geocodeResult) == 0: logging.error(f"Unable to recognize user Address: {userAddress}") print("Unable to identify address") exit() attitude = geocodeResult[0]['geometry']['location'] userLocation = str(attitude['lat']) + ',' + str(attitude['lng']) logging.debug(f"Input adress: {userAddress}, attitude: {userLocation}") return userLocation, userAddress
def getCity():
    global city
    g = geocoder.ip('me')
    city = g.city
def fillFichaMascota(): form = UpdateMascotaDesaparecidaForm() cur = conn.cursor(buffered=True, dictionary=True) query = f"SELECT STATE FROM MASCOTA WHERE ID_MASCOTA={session['id_last_pet']}" cur.execute(query) estado: int = cur.fetchone()['STATE'] cur.close() ip = 'me' try: if request.headers.get('X-Real-Ip'): ip = request.headers.get('X-Real-Ip') print(ip) except: print('no se consiguio la direccion') g = geocoder.ip(ip) latitud = g.latlng[0] longitud = g.latlng[1] if form.validate_on_submit(): cur = conn.cursor(buffered=True) try: if estado == 0: query = f"SELECT actualiza_mascota({session['id_last_pet']}, '{form.comentario.data}', {form.latitud.data}, {form.longitud.data}, '{form.nombre.data}','{form.fecha.data}',{form.edad.data})" cur.execute(query) response: bool = cur.fetchone()[0] else: query = f"SELECT actualiza_mascota({session['id_last_pet']}, '{form.comentario.data}', {form.latitud.data}, {form.longitud.data}, '','{form.fecha.data}',0)" cur.execute(query) response: bool = cur.fetchone()[0] if form.negro.data: query = f"SELECT agrega_pelaje({session['id_last_pet']}, 1)" cur.execute(query) if form.blanco.data: query = f"SELECT agrega_pelaje({session['id_last_pet']}, 2)" cur.execute(query) if form.marron.data: query = f"SELECT agrega_pelaje({session['id_last_pet']}, 3)" cur.execute(query) if form.gris.data: query = f"SELECT agrega_pelaje({session['id_last_pet']}, 4)" cur.execute(query) if form.amarillo.data: query = f"SELECT agrega_pelaje({session['id_last_pet']}, 5)" cur.execute(query) photos = form.photo.raw_data for item in photos: foto = Image.open(item) # Compresion de imagenes antes de guardar en base de datos thumb_io = BytesIO() foto.save(thumb_io, format='JPEG', quality=20) archivo = thumb_io.getvalue() query = "SELECT agrega_fotografia(?,?)" cur.execute(query, (session['id_last_pet'], archivo)) if response: flash(f'Actualizacion de Información correcta', category='success') conn.commit() return redirect( url_for('mostrarMascota', id=session['id_last_pet'])) else: flash(f'Algo salio mal intente nuevamente', category='danger') return redirect(url_for('loginpage')) except mariadb.Error as e: print(f'Error en base de datos: {e}') flash(f'Error en base de datos', category='danger') finally: cur.close() return render_template('fichaMascota.html', Title='Informacion de Mascota', form=form, estado=estado, latitud=latitud, longitud=longitud)
def loc():
    g = geocoder.ip('me')
    return g.city
        '/Users/indukurisuryasaiharischyandraprasad/Desktop/DATASET/Final_Year_Project'
    )
    f = open('face_encodings.pckl', 'wb')
    pickle.dump(known_face_encodings, f)
    f.close()
    f = open('face_names.pckl', 'wb')
    pickle.dump(known_face_names, f)
    f.close()
    print("Success")

if mode == 'display':
    g = geocoder.ip('me')
    location1 = " \nCity: " + str(g.city) + " \nState: " + str(g.state) + \
                " \nCountry: " + str(g.country) + " \nPincode: " + str(g.postal)
    (lat, long) = getLocation()
    location = "Latitude: " + str(lat) + " Longitude: " + str(long) + location1
    lat_in_deg = math.floor(lat)
    lat_in_min1 = (lat - lat_in_deg) * 60
    lat_in_min = math.floor(lat_in_min1)
    lat_in_sec = round((lat_in_min1 - lat_in_min) * 60, 2)
    long_in_deg = math.floor(long)
    long_in_min1 = (long - long_in_deg) * 60
def index(): check = 0 error_msg = " " if request.method == "POST": if request.form['button'] == 'Submit': try: earliest = "" latest = "" start = "" end = "" first = request.form['datetimepicker3'] last = request.form['datetimepicker1'] early = str(dt.date.today()) + " " + first earliest = dt.datetime.strptime( early, '%Y-%m-%d %I:%M %p').timestamp() late = str(dt.date.today()) + " " + last latest = dt.datetime.strptime(late, '%Y-%m-%d %I:%M %p').timestamp() start = request.form['startLocation'] end = request.form['endLocation'] recieving_phone_number = request.form['phoneNumber'] if not start or not end: error_msg = "You must enter your phone number, earliest and latest arrival times, and your start point and final destination." return render_template("index.html", ret=[], length=0, error=error_msg, early="", late="") except: error_msg = "You must enter your phone number, earliest and latest arrival times, and your start point and final destination." return render_template("index.html", ret=[], length=0, error=error_msg, early="", late="") #get current longitude and latitude if not start: g = geocoder.ip('me') curr_loc = g.latlng origin_str = str(curr_loc[0]) + "," + str(curr_loc[1]) else: origin_str = start #api key key = 'AIzaSyDVZ5-R9dw1EFWuzQ8ofHZmqqb4mHiVQfw' #url format: url/json?origin=&destination=&arrival=&apikey= url = 'https://maps.googleapis.com/maps/api/distancematrix/json?' #origin_str = str(curr_loc[0]) + "," + str(curr_loc[1]) url += 'origins=' + origin_str #print(url) #destinations = '13813 Saratoga Vista Ave' #destinations = '38.953,-77.2295' destinations = '+'.join(end.split(' ')) #convert to url format url += '&destinations=' + destinations #window gives 2 date time objects #earliest = early.timestamp() #latest = late.timestamp() #earliest = datetime.strptime(first, "%I:%M %p") #latest = datetime.strptime(last, "%I:%M %p") #print(str(datetime.now().year) + " " + str(datetime.now().month) + " " + str(datetime.now().day) + " " + first) early_url = url + "&arrival_time=" + str(earliest) + '&key=' + key late_url = url + "&arrival_time=" + str(latest) + '&key=' + key r = requests.get(early_url) print(r.json()) early_departure_int = earliest - r.json( )['rows'][0]['elements'][0]['duration']['value'] early_departure = time.strftime( '%I:%M %p', time.localtime(early_departure_int)) r = requests.get(late_url) late_departure_int = latest - r.json( )['rows'][0]['elements'][0]['duration']['value'] late_departure = time.strftime('%I:%M %p', time.localtime(late_departure_int)) early_time = time.localtime(early_departure_int) rounded_start = math.ceil(early_departure_int / 900) * 900 print(rounded_start, late_departure_int) tracker = rounded_start time_interval = [] while tracker <= late_departure_int: time_interval.append(tracker) tracker += 900 mins_interval = [ dt.datetime.fromtimestamp(num).minute + dt.datetime.fromtimestamp(num).hour * 60 for num in time_interval ] mins_interval = np.array(mins_interval) day = dt.datetime.fromtimestamp(rounded_start).day data = pd.read_csv("data.csv") data = data.drop(labels=[ "Unnamed: 0", "driver_id", "ride_id", "event", "timestamp", "ride_distance", "ride_duration", "hour" ], axis=1) X = data.drop(labels=["ride_prime_time"], axis=1) y = data["ride_prime_time"].astype(bool).astype(int) model = RandomForestClassifier(class_weight={ 0: 1, 1: 1.1 }, min_samples_split=480, random_state=0) model.fit(X, y) data2 = data.loc[data["ride_prime_time"] > 0] X2 = data2.drop(labels=["ride_prime_time"], axis=1) y2 = data2["ride_prime_time"] model2 = 
RandomForestClassifier(class_weight='balanced', min_samples_leaf=10, min_samples_split=148, random_state=0) model2.fit(X2, y2) def predict(day, intervals): y_pred = [] for interval in intervals: param = np.array([day, interval]).reshape(1, -1) y_pred.append(model.predict(param)[0]) surge_pred = [] for i in range(len(intervals)): if y_pred[i]: param = np.array([day, intervals[i]]).reshape(1, -1) surge_pred.append(model2.predict(param)[0]) else: surge_pred.append(0) return surge_pred preds = predict(day, mins_interval) print(preds) tup_data = list(zip(time_interval, preds)) tup_data.sort(key=lambda x: (x[1], x[0])) print(tup_data) try: ret = tup_data[0:3] except: ret = tup_data plt.cla() plt.clf() plt.figure() time_list = [ time.strftime('%I:%M %p', time.localtime(num)) for num in time_interval ] sns.set(style="dark", rc={"lines.linewidth": 4.5}) sns.set_palette("GnBu_d", 3) plt.rcParams["font.family"] = "Helvetica" # Plot the responses for different events and regions ax = sns.lineplot(x=time_list, y=preds) # edit this line with data and vars ax.set_yticks([]) ax.set(xlabel='Time', ylabel='Surge', title="When should you ride?") plt.savefig('static/graph.png') ret = [time for time, surge in ret] print(ret) #ret = [str(dt.datetime.fromtimestamp(num).hour) + ':' + str(dt.datetime.fromtimestamp(num).minute) for num in ret] ret = [ time.strftime('%I:%M %p', time.localtime(num)) for num in ret ] print(ret) try: message = client.messages \ .create( body= "leave after " + early_departure + " by " + late_departure, from_=sending_phone_number, to=recieving_phone_number ) except: error_msg = "Unable to send text message with details to " + recieving_phone_number return render_template("index.html", ret=[], length=0, error=error_msg, early=early_departure, late=late_departure) return render_template("index.html", ret=ret, length=len(ret), error="", early=early_departure, late=late_departure) print(message.sid) else: return render_template("index.html", ret=[], length=0, error="", early="", late="") return render_template("index.html", ret=[], length=0, error="", early="", late="")
def send_stats(logger, host, port, user, password, dbname): """ Send anonymous usage statistics Example use: current_stat = return_stat_file_dict() add_update_stat(logger, 'stat', current_stat['stat'] + 5) """ try: client = InfluxDBClient(host, port, user, password, dbname) # Prepare stats before sending with session_scope(MYCODO_DB_PATH) as new_session: relays = new_session.query(Relay) add_update_stat(logger, 'num_relays', get_count(relays)) sensors = new_session.query(Sensor) add_update_stat(logger, 'num_sensors', get_count(sensors)) add_update_stat(logger, 'num_sensors_active', get_count(sensors.filter( Sensor.activated == True))) pids = new_session.query(PID) add_update_stat(logger, 'num_pids', get_count(pids)) add_update_stat(logger, 'num_pids_active', get_count(pids.filter( PID.activated == True))) lcds = new_session.query(LCD) add_update_stat(logger, 'num_lcds', get_count(lcds)) add_update_stat(logger, 'num_lcds_active', get_count(lcds.filter( LCD.activated == True))) logs = new_session.query(Log) add_update_stat(logger, 'num_logs', get_count(logs)) add_update_stat(logger, 'num_logs_active', get_count(logs.filter( Log.activated == True))) timers = new_session.query(Timer) add_update_stat(logger, 'num_timers', get_count(timers)) add_update_stat(logger, 'num_timers_active', get_count(timers.filter( Timer.activated == True))) add_update_stat(logger, 'country', geocoder.ip('me').country) add_update_stat(logger, 'ram_use_mb', resource.getrusage( resource.RUSAGE_SELF).ru_maxrss / float(1000)) user_count = 0 admin_count = 0 with session_scope(USER_DB_PATH) as db_session: users = db_session.query(Users).all() for each_user in users: user_count += 1 if each_user.user_restriction == 'admin': admin_count += 1 add_update_stat(logger, 'num_users_admin', admin_count) add_update_stat(logger, 'num_users_guest', user_count-admin_count) add_update_stat(logger, 'Mycodo_revision', MYCODO_VERSION) # Combine stats into list of dictionaries to be pushed to influxdb new_stats_dict = return_stat_file_dict() formatted_stat_dict = [] for each_key, each_value in new_stats_dict.iteritems(): if each_key != 'stat': # Do not send header row formatted_stat_dict = add_stat_dict(formatted_stat_dict, new_stats_dict['id'], each_key, each_value) # Send stats to influxdb client.write_points(formatted_stat_dict) logger.debug("[Daemon] Sent anonymous usage statistics") return 0 except Exception as except_msg: logger.warning('[Daemon] Could not send anonymous usage statictics: ' '{}'.format(except_msg)) return 1
def calcDistanceView(request):
    distance = None
    destination = None
    form = MeasurementForm(request.POST or None)
    geolocator = Photon(user_agent="measurements")

    # Location Coordinates
    g = geocoder.ip('me')
    lat = g.latlng[0]
    lon = g.latlng[1]
    location = geolocator.reverse(f"{lat}, {lon}")
    pointA = [lat, lon]

    # Initial Folium Map
    m = folium.Map(width='100%', height='100%',
                   location=get_center_coordinates(lat, lon))
    # Location Marker
    folium.Marker([lat, lon], tooltip='Click here for more', popup=location,
                  icon=folium.Icon(color='blue', icon='home')).add_to(m)

    if form.is_valid():
        instance = form.save(commit=False)

        # destination coordinates
        destination_ = form.cleaned_data.get('destination')
        destination = geolocator.geocode(destination_)
        d_lat = destination.latitude
        d_lon = destination.longitude
        pointB = (d_lat, d_lon)

        # calc distance
        distance = round(geodesic(pointA, pointB).km, 2)  # calc the distance

        # Destination Marker
        m = folium.Map(width='100%', height='100%',
                       location=get_center_coordinates(lat, lon, d_lat, d_lon),
                       zoom_start=get_zoom(distance))
        # Location Marker
        folium.Marker([lat, lon], tooltip='Click here for more',
                      popup=get_center_coordinates(lat, lon),
                      icon=folium.Icon(color='blue', icon='home')).add_to(m)
        folium.Marker([d_lat, d_lon], tooltip='Click here for more', popup=destination,
                      icon=folium.Icon(color='red', icon='cloud')).add_to(m)

        # Draw a line between location and destination
        line = folium.PolyLine(locations=[pointA, pointB], weight=3, color='blue')
        m.add_child(line)  # Append the Line to the Map

        # Location
        instance.location = location
        # Distance
        instance.distance = distance
        instance.save()

    # Map Representation
    m = m._repr_html_()

    context = {
        'distance': distance,
        'destination': destination,
        'form': form,
        'map': m,
    }
    return render(request, 'measurements/main.html', context)
def handle_action(self, text, **kwargs): """ Action handler. Handles a text command responding accordingly. """ # Use lowercase for processing. text = text.lower() logger.debug("Received text: '%s'", text) if any(word in text for word in self.STOP_LISTENING): self.speak("As you wish boss. Enjoy your day.") sys.exit() elif any(word == text for word in self.NAMES): self.speak("Yes boss?") elif "how are you" in text: self.speak("I'm fine, thank you.") elif "thank you" in text: self.speak("Any time boss.") elif any("day" in text for word in self.TIME_COMMANDS): self.speak(time.strftime("%x")) elif any(word in text for word in self.TIME_COMMANDS): self.speak(time.strftime("%X")) elif any(word in text for word in self.GREETING) or text=="hi": self.speak("Hello boss.") elif any(string == text for string in self.SWEAR): self.speak("I'm sorry boss, but I was not built to execute such a task.") elif "open" in text: obj = text.split("open ")[1] if obj not in self.OPEN_ACTIONS: self.speak("I'm sorry boss, but I'm unable to recognize the open command") else: if "browser" in obj: self.speak("Opening browser.") webbrowser.open("https://www.google.com/") elif "map" in obj: self.speak("Opening maps.") webbrowser.open("https://www.google.com/maps/") elif "gmail" in obj: self.speak("Opening gmail.") webbrowser.open("https://mail.google.com/") elif "facebook" in obj: self.speak("Opening facebook.") webbrowser.open("https://www.facebook.com/") elif "youtube" in obj: self.speak("Opening youtube.") webbrowser.open("https://www.youtube.com/") elif "home folder" in obj: self.speak("Opening home folder.") folder = os.path.expanduser("~") os.system('xdg-open "%s"' % folder) elif "music" in obj: self.speak("Opening music folder.") folder = os.path.expanduser("~/Music") os.system('xdg-open "%s"' % folder) elif "pictures" in obj: self.speak("Opening pictures folder.") folder = os.path.expanduser("~/Pictures") os.system('xdg-open "%s"' % folder) elif "videos" in obj: self.speak("Opening videos folder.") folder = os.path.expanduser("~/Videos") os.system('xdg-open "%s"' % folder) elif "downloads" in obj: self.speak("Opening downloads folder.") folder = os.path.expanduser("~/Downloads") os.system('xdg-open "%s"' % folder) elif "documents" in obj: self.speak("Opening documents folder.") folder = os.path.expanduser("~/Documents") os.system('xdg-open "%s"' % folder) elif "search" in text: txt_split = text.split(" ") if txt_split[0]=="wolfram" and txt_split[1]=="search": self.handle_wolframalpha_search(text.split("search ")[1]) else: self.speak("Opening google search.") url = "https://www.google.com.tr/search?q={}".format(text.split("search ")[1]) webbrowser.open(url) elif "calculate" in text: self.handle_wolframalpha_search(text.split("calculate ")[1]) elif any(word in text for word in self.WEATHER): if ' in ' in text: place = text.split(" in ")[1] out = data_output(data_organizer(data_fetch(url_builder_city(place)))) if 'brief' in text: self.speak(out[0]) elif 'detailed' in text: self.speak(out[1]) else: self.speak(out[0]) else: IP = ipgetter.myip() match = geocoder.ip(IP) coords = match.latlng data = data_organizer(data_fetch(url_builder_coords(coords[0], coords[1]))) out = data_output(data) if 'brief' in text: self.speak(out[0]) elif 'detailed' in text: self.speak(out[1]) else: self.speak(out[0]) elif "detect" in text or "facial detection" in text: if "faces" in text: faces = detect_faces() if faces == 1: self.speak("I'm detecting "+str(faces)+" face. 
I stored the frame in the data folder.") else: self.speak("I'm detecting "+str(faces)+" faces. I stored the frame in the data folder.")
# Retrieve the names of all astronauts currently in the space station
engine = pyttsx3.init()  ## loads the engine for text to speech
url = "http://api.open-notify.org/astros.json"
response = urllib.request.urlopen(url)
result = json.loads(response.read())

a = open("iss.txt", "w")
a.write("There are currently " + str(result["number"]) + " astronauts on the ISS \n")
people = result["people"]
for p in people:
    a.write(p["name"] + "- on board")
g = geocoder.ip("me")
a.write("\n Your current Lat/Long is: " + str(g.latlng))
a.close()

with open('iss.txt') as f:
    for line in f:
        print(line)
        engine.say(line)
        engine.runAndWait()

screen = turtle.Screen()
screen.setup(1280, 720)
screen.setworldcoordinates(-180, -90, 180, 90)
screen.bgpic("map.gif")
screen.register_shape("iss.gif")
iss = turtle.Turtle()