def _post_actions(self, img_load_success: bool, sync_complete: bool, sync_keys: list):
    """Finalize a sync run (failure or success) and report the outcome.

    If the flask endpoint on the pi is reachable, stop the rotation
    animation shown during syncing and set the display green (success)
    or red (failure).  Otherwise fall back to writing marker files under
    output/ so the outcome is still recorded.

    param: img_load_success - can we talk to flask and load an image on the sense hat
    param: sync_complete - successful syncing or not
    param: sync_keys - keys from dict - target sync DIRs ["key1", "key2"]
    raises: RuntimeError when both the pi and the sync failed
    """
    sync_keys = str(sync_keys)  # typecast for possible later logging

    # function for saving failure file (empty marker file under output/)
    def write_f(fname):
        return open(Path(mod_path, "output", fname), "w")

    # one combined translation table: delete "'[]," and map " :/" to "_"
    # (replaces the original pair of chained translate() calls)
    _table = {ord(c): None for c in "'[],"}
    _table.update({ord(c): "_" for c in " :/"})

    # function for stripping chars that are unwanted in a filename
    def strip_char(chars):
        return chars.translate(_table)

    # current date time
    dtn = datetime.now().strftime('%d/%m/%Y, %H:%M:%S')

    if img_load_success:
        # stop the spinning animation from during sync
        r_post(f"http://{self.pi_ip_port}/post_rotation/", json={"cmd": "kill"})
        if sync_complete:
            # set green - everything good
            r_post(f"http://{self.pi_ip_port}/post-set-img/", json={"base": [0, 120, 0]})
            # wait so it catches my eye
            sleep(2)
            # reset display to default temp display
            r_get(f"http://{self.pi_ip_port}/show_temp/")
        else:
            # set red - something went wrong
            r_post(f"http://{self.pi_ip_port}/post-set-img/", json={"base": [200, 0, 0]})
            # create failure file
            with write_f(f"{strip_char(sync_keys)}_sync_failed_at_{strip_char(dtn)}"):
                pass

    if sync_complete and not img_load_success:
        print(f"pi may be dead, but the sync completed @ {dtn}")
        with write_f(f"pi_dead_at_{strip_char(dtn)}"):
            pass

    if not img_load_success and not sync_complete:
        # create failure file, then abort loudly
        with write_f(f"{strip_char(sync_keys)}_sync_and_pi_failed_at_{strip_char(dtn)}"):
            pass
        # RuntimeError is a subclass of Exception, so callers catching
        # Exception still work
        raise RuntimeError(f"Everything is dead @ {dtn}")
def retrieve_monero_stats():
    """Fetch Monero market statistics from CoinGecko and cache them."""
    logger.info('[INFO] Retrieve Monero market statistics from Coin Gecko')
    # CoinGecko query-string options; the original passed these via data=
    # (a request body) on a GET, where the server ignores them -- send
    # them as query params instead
    params = {
        'localization': False,
        'tickers': False,
        'market_data': True,
        'community_data': False,
        'developer_data': False,
        'sparkline': False
    }
    headers = {'accept': 'application/json'}
    r = r_get('https://api.coingecko.com/api/v3/coins/monero',
              headers=headers, params=params)
    payload = r.json()  # parse once instead of re-parsing per field
    market = payload['market_data']
    monero_info = {
        'genesis_date': payload['genesis_date'],
        # the original dict listed 'market_cap_rank' twice; the later
        # (market_data) value won, so that one is kept
        'market_cap_rank': market['market_cap_rank'],
        'current_price': market['current_price']['usd'],
        'market_cap': market['market_cap']['usd'],
        'total_volume': market['total_volume']['usd'],
        'last_updated': payload['last_updated'],
    }
    logger.info(monero_info)
    cache.set('monero_info', monero_info, settings.CACHE_TTL)
def request(self, method, debug=False, api_format='json', **kwargs):
    """Perform an authenticated GET against the configured API endpoint.

    :param method: API method name interpolated into self.endpoint
    :param debug: pretty-print the decoded response when True
    :param api_format: 'json' decodes the body; anything else returns raw bytes
    :returns: decoded JSON, or raw content on decode failure / non-json format
    :raises GenericError: on connection-level failures
    """
    url = self.endpoint.format(method)
    # drop None-valued kwargs in one pass; .items() also replaces the
    # Python-2-only .iteritems() the original used
    params = {k: v for k, v in kwargs.items() if v is not None}
    if self.defaults:
        params.update(self.defaults)
    try:
        response = r_get(url, auth=(self.username, self.password), params=params)
    except (ConnectionError, Timeout, SSLError):
        raise GenericError
    if api_format == 'json':
        try:
            output = response.json()
        except JSONDecodeError:
            # non-JSON body: fall back to the raw content
            output = response.content
    else:
        output = response.content
    if debug:
        pprint(output)
    return output
def recurse(sub, hot=None, next=""):
    """
    ---------------
    METHOD: recurse
    ---------------
    DESCRIPTION: Returns a list of titles from a specific subreddit,
                 following pagination recursively.

    Args:
        @sub: subreddit to ping
        @hot: accumulator list of titles (internal; defaults to a new list)
        @next: pagination token from the previous page (internal)

    Notes:
        If the subreddit is invalid, this function returns None, else,
        all of the post titles.
    """
    from requests import get as r_get
    # fix: the original default hot=[] is a mutable default shared between
    # calls, so repeated invocations kept appending to the same list
    if hot is None:
        hot = []
    url = 'https://www.reddit.com/r/{}/hot.json?after={}'.format(sub, next)
    data = r_get(url, allow_redirects=False, headers={'User-agent': ''}).json()
    out = ([title['data']['title'] for title in data['data']['children'][:]]
           if data.get('data') and data['data'].get('children') is not None
           else None)
    if out is None:
        # invalid subreddit: the documented contract is to return None
        # (also prevents recursing forever with an unchanged token)
        return None
    hot += out
    next = data['data']['after']
    return recurse(sub, hot, next) if next is not None else hot
def requestSystemsSphereList(centerSysName, radius=50):
    """Query EDSM for all systems within `radius` ly of a named system.

    :param centerSysName: exact system name used as sphere centre
    :param radius: search radius in light years, capped at the API max of 100
    :returns: decoded JSON payload
    """
    radius = min(radius, 100)  # EDSM caps sphere searches at 100 ly
    # params= URL-encodes system names containing spaces/special chars,
    # which the original f-string interpolation did not
    resp = r_get('https://www.edsm.net/api-v1/sphere-systems',
                 params={'systemName': centerSysName, 'radius': radius})
    jObj = resp.json()
    print(f'--> data about systems in sphere around {centerSysName} is gathered /{type(jObj)}')
    return jObj
def get_active_bigip(self, addr_list, username, password):
    """
    Determine the ACTIVE unit in a cluster.

    Probes each address's failover-status REST endpoint and stores the
    last address reporting ACTIVE in self.addr_active.  Any request or
    parsing failure aborts the process via SystemExit.

    :param addr_list: iterable of BIG-IP management addresses
    :param username: REST auth user
    :param password: REST auth password
    """
    # (dead local `addr_active = None` removed -- the result is kept on
    # self.addr_active, the local was never read)
    for address in addr_list:
        url = 'https://{}/mgmt/tm/cm/failover-status'.format(address)
        try:
            r = r_get(url, auth=(username, password), verify=False,
                      timeout=(0.400, 12.0))
        except r_ConnectionError:
            raise SystemExit('ERROR: ConnectionError {}'.format(address))
        except r_Timeout:
            raise SystemExit('ERROR: Timeout {}'.format(address))
        except r_TooManyRedirects:
            raise SystemExit('ERROR: TooManyRedirects {}'.format(address))
        except Exception as e:
            raise SystemExit('ERROR: r_get failed - {}'.format(e))
        # NOTE(review): the body is parsed with ast.literal_eval rather
        # than r.json() -- presumably the payload is dict-literal-safe;
        # confirm against a live response before switching to .json()
        x = ast_eval(str(r.text))
        try:
            status = x['entries'][
                'https://localhost/mgmt/tm/cm/failover-status/0'][
                'nestedStats']['entries']['status']['description']
        except KeyError:
            raise SystemExit('ERROR: KeyError')
        except Exception as e:
            raise SystemExit('ERROR: {}'.format(e))
        if status == "ACTIVE":
            self.addr_active = address
def requestSystemsCubeList(centerSysName, size=100):
    """Query EDSM for all systems within a cube centred on a named system.

    :param centerSysName: exact system name used as cube centre
    :param size: cube edge length in light years, capped at the API max of 200
    :returns: decoded JSON payload
    """
    size = min(size, 200)  # EDSM caps cube searches at 200 ly
    # params= URL-encodes system names containing spaces/special chars,
    # which the original f-string interpolation did not
    resp = r_get('https://www.edsm.net/api-v1/cube-systems',
                 params={'systemName': centerSysName, 'size': size})
    jObj = resp.json()
    print(f'--> data about systems in cube around {centerSysName} is gathered /{type(jObj)}')
    return jObj
def get_coin_info(self):
    """Return CoinGecko market info for Xolentum, redis-cached.

    Serves from redis when present; otherwise fetches from the API,
    caches the result via self.store_data, and returns it.  On any
    failure an empty dict is returned (deliberate best-effort).
    """
    info = self.redis.get("coin_info")
    if info:
        return json_loads(info)
    # CoinGecko query-string options; the original passed them via data=
    # (a request body) on a GET, where they are ignored -- use params=
    params = {
        'localization': False,
        'tickers': False,
        'market_data': True,
        'community_data': False,
        'developer_data': False,
        'sparkline': False
    }
    headers = {'accept': 'application/json'}
    url = 'https://api.coingecko.com/api/v3/coins/xolentum'
    try:
        r = r_get(url, headers=headers, params=params)
        payload = r.json()  # parse once instead of re-parsing per field
        market = payload['market_data']
        info = {
            'genesis_date': payload['genesis_date'],
            'market_cap_rank': payload['market_cap_rank'],
            'current_price': market['current_price']['usd'],
            'market_cap': market['market_cap']['usd'],
            'total_volume': market['total_volume']['usd'],
            'last_updated': payload['last_updated'],
        }
        self.store_data("coin_info", 15, json_dumps(info))
        return info
    except Exception:
        # narrowed from a bare except:; still best-effort by design
        return {}
def _validate_ip(self, ip):
    """Return True when `ip` works as an HTTP(S) proxy for a Google GET."""
    proxy = {'http': ip, 'https': ip}
    try:
        resp = r_get('https://www.google.com', timeout=5, proxies=proxy)
    except Exception:
        # best-effort by design: any failure marks the proxy as invalid
        return False
    # comparison is already a bool; the bool() wrapper was redundant
    return resp.status_code == 200
def save_pic_1(path):
    """Download page 011.jpg of every chapter in [an_start, an_end].

    Stops at the first chapter whose response starts with an HTML
    document marker (the site serves an error page for missing
    chapters).
    """
    for an in range(an_start, an_end + 1):
        request = r_get(page + str(an) + '/011.jpg', stream=True)
        # 60/33 in the original are ord('<') and ord('!'): the start of
        # an HTML error page rather than jpeg data
        if request.content[:2] == b'<!':
            break
        with open(path + '/' + str(an) + '_011.jpg', 'wb') as f:
            f.write(request.content)
def make_request(url: str, path="/get_info", data=None):
    """GET `url + path`, routing .onion hosts through the local Tor proxy.

    :param url: base URL; .onion addresses get socks5h proxying and a
        longer timeout
    :param path: endpoint path appended to url
    :param data: optional payload sent as a JSON body
    :returns: the Response (raise_for_status already applied)
    :raises requests.HTTPError: on non-2xx responses
    """
    if is_onion(url):
        tor = f"socks5h://{config.TOR_HOST}:{config.TOR_PORT}"
        # fix: also proxy https so onion requests never leak outside Tor
        # (the original dict only had an "http" entry)
        proxies = {"http": tor, "https": tor}
        timeout = 18  # Tor circuits are slow; allow extra time
    else:
        proxies = None
        timeout = 6
    r = r_get(url + path, timeout=timeout, proxies=proxies, json=data)
    r.raise_for_status()
    return r
def get_data_file(filename):
    """Fetch a data file into the system through the API.

    Downloads CovidData.COVID_URL + filename, saves it locally, records
    it in CovidData.CACHE and returns the filename; returns None on any
    non-200 response.
    """
    response = r_get(CovidData.COVID_URL + filename)
    if response.status_code != 200:
        return None
    with open(filename, 'wb') as fh:
        fh.write(response.content)
    CovidData.CACHE.append(filename)
    return filename  # type: str
def __call__(self, country, region=None, timeout=30.0, reattempt=5):
    """Geocode `country` (optionally 'Country, Region'), with retries.

    Tries progressively looser queries (exact, force_query, then
    country-only variants when no region was given); a candidate that
    yields an empty result list (IndexError) falls through to the next.
    Unexpected errors are printed and the whole sequence is retried up
    to `reattempt` times before raising LookupError.
    """
    for _ in range(reattempt):
        try:
            if u',' in country and region is None:
                # split a combined "Country, Region" value and normalize
                # internal whitespace in both halves
                country_, region_ = country.split(u',', 1)
                country_ = u' '.join(country_.strip().split())
                region_ = u' '.join(region_.strip().split())
            else:
                country_, region_ = country, region
            # candidate queries, most specific first (this loop replaces
            # four copy-pasted try/except blocks in the original)
            attempts = [
                dict(country=country_, region=region_),
                dict(country=country_, region=region_, force_query=True),
            ]
            if region is None:
                attempts.append(dict(country=country_))
                attempts.append(dict(country=country_, force_query=True))
            for kw in attempts:
                try:
                    content = r_get(self.get_url(**kw), timeout=timeout).content
                    return loads(content.strip())[0]
                except IndexError:
                    pass  # empty result: try the next, looser query
        except Exception:
            # transient failure (network, decode): log and retry
            # (parenthesized print works on both Python 2 and 3)
            print(format_exc())
    raise LookupError("Failed to find results for query: %s, %s"
                      % (country, region))
def get_book_data(isbn):
    """Merge local Book data with Goodreads statistics for an ISBN.

    Returns the combined dict, or {'error_message': ...} when the
    Goodreads API call fails.
    """
    gr_response = r_get(GOODREAD_URL,
                        params={'key': GOODREAD_KEY, 'isbns': isbn})
    if gr_response.status_code != 200:
        return {'error_message': "Internal error"}
    book = Book.get_as_dict(isbn=isbn)
    gr_payload = gr_response.json()['books'][0]
    # keep only the fields the app exposes
    filtered = {field: gr_payload[field] for field in GOODREAD_API}
    return {**book, **filtered}
def get_feed_contents(feed_url: str):
    """GET a feed URL and return the Response, or None on any failure.

    Uses a curl User-Agent because some websites (specifically s3daily)
    block python's default headers.
    """
    headerz = {'User-Agent': 'curl'}
    try:
        return r_get(feed_url, headers=headerz)
    except Exception as e:
        print('WARNING:\nhad the following error for url: {}\n: {}'.format(
            feed_url, e))
        return None
def run(self):
    """Checks on line for updates"""
    try:
        # <short_name>_current_version.txt on the update server holds the
        # latest released version string (with a leading prefix char, e.g. 'v')
        update_url = (self.server_name + '/current_version/' +
                      self.short_name + '_current_version.txt')
        webpage = r_get(update_url)
        online_version = webpage.text[1:]  # drop the leading prefix char
        # NOTE(review): StrictVersion (distutils) is deprecated/removed in
        # newer Pythons; this block is Python 2 (bare print statement below)
        if StrictVersion(online_version) > StrictVersion(self.version[1:]):
            self.do_update(update_url, online_version)
        else:
            return  # already up to date
    except Exception as error:
        print "Error in update_check: ", error
def download(url: str, file: str = "") -> str:
    """
    Download a file from 'url' and save it as 'file'.

    Parameters:
        url -- URL the file is downloaded from
        file -- (Optional) filename the downloaded file is saved into (default: "")

    Returns:
        A filename where the downloaded file has been stored
    """
    r = r_get(url)
    if not file:
        # delete=False keeps the path valid after the object is closed;
        # the original NamedTemporaryFile().name was unlinked as soon as
        # the temp object was garbage-collected
        with NamedTemporaryFile(delete=False) as tmp:
            file = tmp.name
    # fix: close the handle deterministically (the original
    # open(...).write(...) left it to the garbage collector)
    with open(file, 'wb') as fh:
        fh.write(r.content)
    return file
def argument_parser(self):
    """Parse CLI options and apply them to this server instance.

    Supports explicit ip/port, auto-detection of the local or external
    ip, a command-listing flag, and a wait-for-connections start mode.
    """
    parser = ArgumentParser(usage="python server.py [-options]")
    parser.add_argument("-ip", type=str, nargs="?", const=self.ip,
                        default=self.ip, help="Set ip address.")
    parser.add_argument("-port", type=int, nargs="?", const=self.port,
                        default=self.port,
                        help="Set port. Default is set to 6000.")
    parser.add_argument("-get-local-ip", action="store_true",
                        help="Find and set local ip.")
    parser.add_argument("-get-external-ip", action="store_true",
                        help="Find and set external ip.")
    parser.add_argument("-l", "--list", action="store_true",
                        help="Print all commands.")
    parser.add_argument("-w", "--wait", action="store_true",
                        help="Start with wait command. Wait for connections.")
    args = parser.parse_args()
    if args.get_local_ip:
        host = socket.gethostname()
        print("Reading local ip from: {}".format(host))
        self.ip = socket.gethostbyname(host)
    elif args.get_external_ip:
        # ipify returns the caller's public ip as plain text
        self.ip = r_get('https://api.ipify.org').text
    else:
        self.ip = args.ip
    self.port = args.port
    self.list = args.list
    self.wait_mode = args.wait
def download_oui_defs(fpath: str, force_dl=False) -> bool:
    """Ensure a local copy of the IEEE OUI definitions file exists.

    Downloads http://standards-oui.ieee.org/oui.txt to `fpath` unless a
    copy newer than one week already exists (skipped when force_dl).

    :param fpath: destination path for oui.txt
    :param force_dl: download even when a fresh copy exists
    :returns: True when a usable file is present, False on download failure
    """
    from time import time  # needed for the file-age check below
    one_week = 604800  # seconds
    # fix: st_mtime is an absolute epoch timestamp, so the original
    # `st_mtime > 604800` was true for any file modified after Jan 1970;
    # compare the file AGE instead
    is_fresh = isfile(fpath) and (time() - stat(fpath).st_mtime) < one_week
    if is_fresh and not force_dl:
        print(f"{Fore.CYAN}Definitions exist and file is less than one week old, omitting download")
        return True
    if force_dl:
        print(f"{Fore.LIGHTRED_EX}Download forced, please wait...")
    else:
        print(f"{Fore.CYAN}Definitions not found or too old, downloading file, please wait...")
    r = r_get("http://standards-oui.ieee.org/oui.txt")
    if r.status_code == 200:
        with open(fpath, "wb") as fp:
            fp.write(r.content)
        return True
    print(f"{Fore.RED}Couldn't download oui definitions! HTTP status was {r.status_code}")
    return False
def save_pics(path):
    # Download numbered pages for every chapter in [an_start, an_end],
    # naming files "<chapter>_<filler-digit><page>.jpg".
    # NOTE(review): reconstructed from whitespace-mangled source; the
    # control flow below looks defective -- `pic -= 9` plus the later
    # `pic += 1` nets -8 per iteration, so `pic > 9` never triggers and
    # `break` is only reached via the HTML-error check. Compare with
    # save_pic_1 and verify against the original repository.
    for an in range(an_start, an_end+1):
        filler = fill
        pic = 1
        while 1:
            if pic > 9:
                break
            filler = '/01'
            pic -= 9
            request = r_get(page + str(an) + filler + str(pic) + '.jpg', stream=True)
            # 60/33 are ord('<') / ord('!'): an HTML error page marks the
            # end of available pages
            cod = request.content[0]
            if cod == 60 and request.content[1] == 33:
                break
            pic += 1
            with open(path + '/' + str(an) + '_' + filler[2] + str(pic) + '.jpg', 'wb') as f:
                #request.raw.decode_content = True
                #shutil.copyfileobj(request.raw, f)
                f.write(request.content)
def count_words(sub, word_list, next="", keys=None):
    """
    -------------------
    METHOD: count_words
    -------------------
    DESCRIPTION: Prints how many times each word_list keyword appears in
                 a subreddit's hot titles (case-insensitive, whole-word),
                 sorted by count descending; zero counts are omitted.

    Args:
        @sub: subreddit to ping
        @word_list: keywords to tally
        @next: pagination token (internal)
        @keys: accumulator dict (internal; defaults to a new dict)

    Notes:
        If the subreddit is invalid, nothing is printed.
    """
    from requests import get as r_get
    # fix: the original default keys={} is a mutable default, so counts
    # persisted across separate calls
    if keys is None:
        keys = {}
    url = 'https://www.reddit.com/r/{}/hot.json?after={}'.format(sub, next)
    data = r_get(url, allow_redirects=False, headers={'User-agent': ''}).json()
    out = ([title['data']['title'] for title in data['data']['children'][:]]
           if data.get('data') and data['data'].get('children') is not None
           else None)
    if out is None:
        # fix: the original iterated `out` before checking it for None,
        # crashing on invalid subreddits
        return
    for word in word_list:
        keys.setdefault(word, 0)
        lowered = word.lower()
        for title in out:
            tokens = title.lower().split()
            # fix: the original counted the UN-lowered keyword inside the
            # lowered tokens, so mixed-case keywords always counted 0
            keys[word] += tokens.count(lowered)
    next = data['data']['after']
    # If we aren't at the last page of the API, continue
    if next is not None:
        return count_words(sub, word_list, next, keys)
    # Alright, we're at the end, print the counts...
    if keys == {}:
        print()
    for k, v in sorted(keys.items(), key=lambda p: p[1], reverse=True):
        if v != 0:
            print(k + ":", v)
def recurse():
    """
    ---------------
    METHOD: recurse
    ---------------
    DESCRIPTION: Debug helper -- fetches a single page of r/deadsubs and
                 prints the type of its 'after' token and the value of
                 its 'before' token.
    """
    from requests import get as r_get
    url = 'https://www.reddit.com/r/deadsubs/hot.json?after=t3_1njucr'
    payload = r_get(url, allow_redirects=False,
                    headers={'User-agent': ''}).json()
    listing = payload['data']
    print(type(listing['after']), listing['before'])
def ask_online(self, source_cur, destinion_cur):
    """Convert self.requested_amount between two currency codes online.

    :param source_cur: 3-letter source currency code (any case)
    :param destinion_cur: 3-letter destination currency code (any case)
    :returns: whatever self.parse_page extracts from the converter page
    :raises ValueError: on non-str or wrong-length arguments
    """
    # isinstance is the idiomatic type check; `type(x) is not str` also
    # rejected str subclasses for no benefit
    if not isinstance(source_cur, str):
        raise ValueError(self.error_template.format(source_cur, str,
                                                    type(source_cur)))
    if not isinstance(destinion_cur, str):
        raise ValueError(self.error_template.format(destinion_cur, str,
                                                    type(destinion_cur)))
    if len(source_cur) != 3:
        raise ValueError("Invalid 'source_cur' argument format. Should be 3 letter string")
    if len(destinion_cur) != 3:
        raise ValueError("Invalid 'destinion_cur' argument format. Should be 3 letter string")
    page = r_get(ONLINE_CONVERTER_URL,
                 params={'Amount': self.requested_amount,
                         'From': source_cur.upper(),
                         'To': destinion_cur.upper()})
    return self.parse_page(page.text)
def number_of_subscribers(subreddit):
    """
    -----------------------------
    METHOD: number_of_subscribers
    -----------------------------
    Description: Returns the number of subscribers in a specific
                 subreddit.

    Args:
        @subreddit: subreddit to ping

    Notes:
        If the subreddit is invalid, this function returns a 0, else,
        the number of subs.
    """
    from requests import get as r_get
    about_url = "https://www.reddit.com/r/{}/about.json".format(subreddit)
    payload = r_get(about_url, allow_redirects=False,
                    headers={'User-agent': ''}).json()
    if 'data' not in payload:
        return 0
    return payload['data']['subscribers']
def get(self):
    """Fetch the wiki content described by self.manifest.

    Stores the decoded JSON in self.data and the HTTP status code in
    self.get_result_code.

    :raises ValueError: on auth errors (401), any other non-200 status,
        or a Content-Type other than exactly "application/json"
    """
    response = r_get(url=self.manifest['url'],
                     auth=self.manifest['auth'],
                     params=self.manifest['parameters'])
    status = response.status_code
    if status == 401:
        raise ValueError(
            'Authentication error while fetching wiki content')
    if status != 200:
        raise ValueError(
            f'Bad status code while fetching wiki content ({status})')
    if response.headers['Content-Type'] != 'application/json':
        raise ValueError(
            'Content-Type did not equal "application/json"')
    self.data = response.json()
    self.get_result_code = status
    print(f'INFO :: GET Result Code = {self.get_result_code}')
def top_ten(subreddit):
    """
    ---------------
    METHOD: top_ten
    ---------------
    Description: Finds and prints the titles of the top 10 hot posts of
                 a specific subreddit.

    Args:
        @subreddit: subreddit to ping

    Notes:
        If the subreddit is invalid, this function prints None, else,
        upto 10 post titles.
    """
    from requests import get as r_get
    url = 'https://www.reddit.com/r/{}/hot.json'.format(subreddit)
    payload = r_get(url, allow_redirects=False,
                    headers={'User-agent': ''}).json()
    listing = payload.get('data')
    if listing and listing.get('children') is not None:
        for child in listing['children'][:10]:
            print(child['data']['title'])
    else:
        print(None)
def do_update(self, url_path, online_version): """download and install""" # ask if they want to update dlg = wx.MessageDialog(parent=self.parent, message=self.name + ' v' + str(StrictVersion(online_version)) + ' is available. \r' + 'Do you want to download and update?', caption='Do you want to update?', style=wx.OK | wx.CANCEL) if dlg.ShowModal() != wx.ID_OK: dlg.Destroy() return dlg.Destroy() response = r_get(self.server_name + '/software/' + self.path_name + '/' + self.common_name + '_' + str(StrictVersion(online_version)) + '.exe', stream=True) if not response.ok: print response return total_length = response.headers.get('content-length') if total_length is None: # no content length header return else: total_length = int(total_length) # / 1024 downloadBytes = total_length / 100 dlg = wx.ProgressDialog("Download Progress", "Downloading update now", parent=self.parent, style=wx.PD_AUTO_HIDE | wx.PD_CAN_ABORT | wx.PD_REMAINING_TIME) temp_folder = os.environ.get('temp') temp_file = (os.path.join(temp_folder, self.common_name + '_' + str(StrictVersion(online_version)) + '.exe')) with open(temp_file, 'wb') as handle: count = 0 for data in response.iter_content(downloadBytes): if data: count += 1 if count >= 100: count = 99 handle.write(data) (cancel, skip) = dlg.Update(count, "Downloaded " + str(downloadBytes * count / 1024) + " of " + str(total_length / 1024) + "KB") if not cancel: response.close() dlg.Destroy() if not cancel: dlg = wx.MessageDialog( parent=self.parent, message='Download Cancelled\r\r' + 'If you want to run the update again, please restart program.', caption='Update program cancelled', style=wx.OK) dlg.ShowModal() return self.install_update(online_version, temp_file)
def _fetch(url, **kwargs):
    """GET `url` (kwargs passed through to requests) and return the raw body."""
    response = r_get(url, **kwargs)
    return response.content
def login(provider):
    """OAuth login endpoint supporting the 'google' and 'facebook' providers.

    Exchanges the provider auth code/token for user info, creates the
    local user on first login, and returns a JSON payload with an API
    token plus profile fields (or an error response).
    """
    # STEP 1 - Parse the auth code
    code = request.data
    if provider == 'google':
        # STEP 2 - Exchange for a token
        try:
            # Upgrade the authorization code into a credentials object
            oauth_flow = flow_from_clientsecrets('client_secrets.json', scope='')
            oauth_flow.redirect_uri = 'postmessage'
            credentials = oauth_flow.step2_exchange(code)
        except FlowExchangeError:
            response = make_response(
                dumps('Failed to upgrade the authorization code.'), 401)
            response.headers['Content-Type'] = 'application/json'
            return response
        # Check that the access token is valid.
        access_token = credentials.access_token
        url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token=%s'
               % access_token)
        h = Http()
        result = loads(h.request(url, 'GET')[1])
        # If there was an error in the access token info, abort.
        if result.get('error') is not None:
            response = make_response(dumps(result.get('error')), 500)
            response.headers['Content-Type'] = 'application/json'
            # fix: the error response was built but never returned, so
            # execution fell through despite the invalid token
            return response
        # Get user info
        h = Http()
        userinfo_url = "https://www.googleapis.com/oauth2/v1/userinfo"
        params = {'access_token': credentials.access_token, 'alt': 'json'}
        answer = r_get(userinfo_url, params=params)
        data = answer.json()
        # see if user exists, if it doesn't make a new one
        user = get_user_by_email(email=data['email'])
        if not user:
            user = create_user(username=data.get('name'),
                               picture=data.get('picture'),
                               email=data.get('email'),
                               first_name=data.get('given_name'),
                               last_name=data.get('family_name'),
                               password=get_unique_str(8))
        g.user = user
        # Make token
        token = g.user.generate_auth_token()
        # Send back token to the client
        return jsonify({
            'token': token.decode('ascii'),
            'uid': g.user.id,
            'first_name': g.user.first_name,
            'last_name': g.user.last_name,
            'email': g.user.email,
            'picture': g.user.picture,
            'status': g.user.status,
            'full_name': g.user.get_full_name
        }), 200
    elif provider == 'facebook':
        data = request.json.get('data')
        access_token = data['access_token']
        fb_file = ''.join([BASE_DIR, '/facebook.json'])
        # fix: close the config file deterministically (the original left
        # the handle to the garbage collector)
        with open(fb_file, 'r') as fh:
            fb_data = loads(fh.read())['facebook']
        app_id = fb_data['app_id']
        app_secret = fb_data['app_secret']
        url = fb_data['access_token_url'] % (app_id, app_secret, access_token)
        h = Http()
        result = h.request(url, 'GET')[1]
        # Use token to get user info from API
        token = result.split(',')[0].split(':')[1].replace('"', '')
        url = fb_data['user_info_url'] % token
        h = Http()
        result = h.request(url, 'GET')[1]
        data = loads(result)
        name = data['name'].split(' ')
        user_data = dict()
        user_data['provider'] = 'facebook'
        user_data['username'] = data.get('name')
        user_data['first_name'] = name[0]
        # fix: single-word names crashed on name[1]
        user_data['last_name'] = name[1] if len(name) > 1 else ''
        user_data['email'] = data.get('email')
        user_data['facebook_id'] = data.get('id')
        user_data['access_token'] = token
        url = fb_data['picture_url'] % token
        h = Http()
        result = h.request(url, 'GET')[1]
        data = loads(result)
        user_data['picture'] = data['data']['url']
        # see if user exists
        user_info = get_user_by_email(user_data['email'])
        if user_info is None:
            user_info = create_user(username=user_data['username'],
                                    password=get_unique_str(8),
                                    first_name=user_data['first_name'],
                                    last_name=user_data['last_name'],
                                    email=user_data['email'],
                                    picture=user_data['picture'])
        g.user = user_info
        token = g.user.generate_auth_token()
        return jsonify({
            'token': token.decode('ascii'),
            'uid': g.user.id,
            'first_name': g.user.first_name,
            'last_name': g.user.last_name,
            'email': g.user.email,
            'picture': g.user.picture,
            'status': g.user.status,
            'full_name': g.user.get_full_name
        }), 200
    else:
        return jsonify({'error': 'Unknown provider'}), 200
def login(provider):
    """Exchange a Google auth code for a local API token.

    Only the 'google' provider is supported; any other value returns a
    plain error string.
    """
    # STEP 1 - Parse the auth code
    auth_code = request.json.get('auth_code')
    # (parenthesized print works identically on Python 2 and 3 here)
    print("Step 1 - Complete, received auth code %s" % auth_code)
    if provider == 'google':
        # STEP 2 - Exchange for a token
        try:
            # Upgrade the authorization code into a credentials object
            oauth_flow = flow_from_clientsecrets('client_secrets.json', scope='')
            oauth_flow.redirect_uri = 'postmessage'
            credentials = oauth_flow.step2_exchange(auth_code)
        except FlowExchangeError:
            response = make_response(
                json.dumps('Failed to upgrade the authorization code.'), 401)
            response.headers['Content-Type'] = 'application/json'
            return response
        # Check that the access token is valid.
        access_token = credentials.access_token
        url = (WEB['access_token'] % access_token)
        h = Http()
        result = json.loads(h.request(url, 'GET')[1])
        # If there was an error in the access token info, abort.
        if result.get('error') is not None:
            response = make_response(json.dumps(result.get('error')), 500)
            response.headers['Content-Type'] = 'application/json'
            # fix: the error response was built but never returned, so
            # execution fell through despite the invalid token
            return response
        # NOTE(review): the original message may have contained a line
        # break lost in transcription -- confirm against the repository
        print("Step 2 Complete! Access Token : %s " % credentials.access_token)
        # STEP 3 - Find User or make a new one
        # Get user info
        h = Http()
        userinfo_url = "https://www.googleapis.com/oauth2/v1/userinfo"
        params = {'access_token': credentials.access_token, 'alt': 'json'}
        answer = r_get(userinfo_url, params=params)
        data = answer.json()
        name = data['name']
        picture = data['picture']
        email = data['email']
        # see if user exists, if it doesn't make a new one
        user = session.query(User).filter_by(email=email).first()
        if not user:
            user = User(username=name, picture=picture, email=email)
            session.add(user)
            session.commit()
        # STEP 4 - Make token
        token = user.generate_auth_token(600)
        # STEP 5 - Send back token to the client
        return jsonify({'token': token.decode('ascii')})
        # return jsonify({'token': token.decode('ascii'), 'duration': 600})
    else:
        return 'Unrecoginized Provider'  # sic -- kept byte-identical for clients
def requestMarketCommoditiesInfo(marketId):
    """Return the commodity list of an EDSM station market.

    :param marketId: EDSM market id
    :returns: list of commodity dicts ([] when the payload carries none)
    """
    # params= URL-encodes the query value (consistent with the other
    # EDSM helpers)
    resp = r_get('https://www.edsm.net/api-system-v1/stations/market',
                 params={'marketId': marketId})
    jObj = resp.json()
    print(f'--> data about comms in market#{marketId} is gathered /{type(jObj)}')
    # .get mirrors requestSystemStationsInfo and avoids a KeyError on
    # payloads without commodity data
    return jObj.get('commodities', [])
def requestSystemStationsInfo(systemName):
    """Return the station list of an EDSM system.

    :param systemName: exact system name
    :returns: list of station dicts ([] when none / unknown system)
    """
    # params= URL-encodes system names containing spaces/special chars,
    # which the original f-string interpolation did not
    resp = r_get('https://www.edsm.net/api-system-v1/stations',
                 params={'systemName': systemName})
    jObj = resp.json()
    print(f'--> data about stations into {systemName} is gathered /{type(jObj)}')
    return jObj.get('stations', [])