def _initStats(self, season=None, perMode='PerGame'):
    """Constructs the player's Stats object for a given season.

    :param season: A valid season string (ex. '2015-16'); defaults to the
        Player's own season.
    :param perMode: How the averages are displayed ('PerGame', 'Per36', etc.).

    Side effect: sets ``self._stats`` to a Stats object on success, or to
    ``None`` when the download fails or the season row is not found.
    """
    if not season:
        season = self.season()
    stats_url = helpers.get_url('playercareerstats', PerMode=perMode,
                                PlayerID=self.playerID())
    try:
        allStats = Series(requests.get(stats_url, headers=USER_AGENT).json())
    except ValueError as e:
        # Endpoint returned malformed/empty JSON.
        # (Py3 fix: print() function and str(e) instead of e.message.)
        print('Could not load stats for player "' + self.name() + '"')
        print('Error:', e)
        print('URL:', stats_url)
        self._stats = None
        return

    statsHeaders = allStats['resultSets'][0]['headers']
    seasonStats = None
    for row in allStats['resultSets'][0]['rowSet']:
        if season in row:
            seasonStats = row
            break  # first matching season row wins
    if seasonStats is None:
        # BUG FIX: original raised NameError (seasonStats unbound) when the
        # requested season was absent from the rowSet.
        self._stats = None
        return
    self._stats = Stats(
        Series(dict(zip(statsHeaders, seasonStats)), name=self._name))
def download(bot, update):
    """Handle a song-download request from a plain message or a callback query.

    Validates the request text, downloads the audio via
    ``downloader.download_audio`` and sends it back to the chat.

    Failures are deliberately swallowed (best-effort bot handler), but we now
    catch ``Exception`` instead of a bare ``except`` so KeyboardInterrupt and
    SystemExit still propagate, and the audio file handle is properly closed.
    """
    try:
        try:
            # Callback queries carry the payload in .data; normalize `update`
            # so the rest of the handler can use update.message uniformly.
            text = update.callback_query.data
            update = update.callback_query
        except AttributeError:
            text = update.message.text
        if not helpers.check(text):
            bot_msg = bot.send_message(chat_id=update.message.chat_id,
                                       text=usage_msg)
            time.sleep(20)  # let the user read the usage hint, then clean up
            bot.delete_message(chat_id=update.message.chat_id,
                               message_id=bot_msg.message_id)
        else:
            sent_msg = bot.send_message(chat_id=update.message.chat_id,
                                        text=dwn_msg)
            url = helpers.get_url(text)
            vId = helpers.get_vId(url)
            sys.stdout.write("New song request client username %s\n"
                             % update.message.chat.username)
            audio_info = downloader.download_audio(vId, url)
            bot.delete_message(chat_id=update.message.chat_id,
                               message_id=sent_msg.message_id)
            try:
                # The user's original message may already be deleted; ignore.
                bot.delete_message(chat_id=update.message.chat_id,
                                   message_id=update.message.message_id)
            except Exception:
                pass
            if not audio_info["status"]:
                msg = "Something went wrong: %s" % audio_info["error"]
                return bot.send_message(chat_id=update.message.chat_id,
                                        text=msg)
            # BUG FIX: file handle was never closed; use a context manager.
            with open(audio_info["path"], 'rb') as audio:
                bot.send_audio(chat_id=update.message.chat_id, audio=audio,
                               duration=audio_info["duration"],
                               title=audio_info["title"], timeout=999)
    except Exception:
        # Best-effort handler: never crash the dispatcher thread.
        pass
def download_previous_vintage(self) -> None:
    """Downloads the previous NRN vintage and extracts the English GeoPackage
    as <source>_old.gpkg."""

    logger.info("Retrieving previous NRN vintage.")

    # Determine download requirement.
    if self.nrn_old_path["gpkg"].exists():
        logger.warning(
            f"Previous NRN vintage already exists: \"{self.nrn_old_path['gpkg']}\". Skipping step."
        )
    else:
        # Download previous NRN vintage.
        logger.info("Downloading previous NRN vintage.")
        download_url = None

        try:
            # Get download url.
            download_url = helpers.load_yaml(
                filepath.parents[1] / "downloads.yaml")["previous_nrn_vintage"][self.source]

            # Get raw content stream from download url.
            download = helpers.get_url(download_url, stream=True, timeout=30, verify=True)

            # Copy download content to file.
            with open(self.nrn_old_path["zip"], "wb") as f:
                shutil.copyfileobj(download.raw, f)

        except (requests.exceptions.RequestException, shutil.Error) as e:
            logger.exception(
                f"Unable to download previous NRN vintage: \"{download_url}\"."
            )
            logger.exception(e)
            sys.exit(1)

        # Extract zipped data.
        logger.info("Extracting zipped data for previous NRN vintage.")
        # BUG FIX: the archive was previously opened twice — once (unclosed)
        # just to read namelist(). Open it once and reuse the handle.
        with zipfile.ZipFile(self.nrn_old_path["zip"], "r") as zip_f:
            gpkg_download = [
                f for f in zip_f.namelist()
                if f.lower().startswith("nrn") and Path(f).suffix == ".gpkg"
            ][0]
            with zip_f.open(gpkg_download) as zsrc, \
                    open(self.nrn_old_path["gpkg"], "wb") as zdest:
                shutil.copyfileobj(zsrc, zdest)

        # Remove temporary files.
        logger.info("Removing temporary files for previous NRN vintage.")
        if self.nrn_old_path["zip"].exists():
            self.nrn_old_path["zip"].unlink()
def get_dict(*keys, **extras):
    """Return a dict describing the current request, restricted to *keys*.

    Each requested key is looked up in the full request summary; any
    ``**extras`` are merged into the result afterwards.
    """
    valid_keys = ('url', 'args', 'form', 'data', 'origin', 'headers',
                  'files', 'json', 'method')
    assert all(key in valid_keys for key in keys)

    raw_body = request.data
    flat_form = semiflatten(request.form)

    # Body may not be valid (or any) JSON; fall back to None.
    try:
        parsed_json = json.loads(raw_body.decode('utf-8'))
    except (ValueError, TypeError):
        parsed_json = None

    everything = dict(
        url=get_url(request),
        args=semiflatten(request.args),
        form=flat_form,
        data=json_safe(raw_body),
        origin=request.headers.get('X-Forwarded-For', request.remote_addr),
        headers=get_headers(),
        files=get_files(),
        json=parsed_json,
        method=request.method,
    )

    result = {key: everything.get(key) for key in keys}
    result.update(extras)
    return result
def _initStats(self, season=None, perMode='PerGame'):
    """Constructs the player's Stats object for a given season.

    :param season: A valid season string (ex. '2015-16'); defaults to the
        Player's own season.
    :param perMode: How the averages are displayed ('PerGame', 'Per36', etc.).

    Side effect: sets ``self._stats`` to a Stats object on success, or to
    ``None`` when the download fails or the season row is not found.
    """
    if not season:
        season = self.season()
    stats_url = helpers.get_url('playercareerstats', PerMode=perMode,
                                PlayerID=self.playerID())
    try:
        allStats = Series(requests.get(stats_url, headers=USER_AGENT).json())
    except ValueError as e:
        # Endpoint returned malformed/empty JSON.
        # (Py3 fix: print() function and str(e) instead of e.message.)
        print('Could not load stats for player "' + self.name() + '"')
        print('Error:', e)
        print('URL:', stats_url)
        self._stats = None
        return

    statsHeaders = allStats['resultSets'][0]['headers']
    seasonStats = None
    for row in allStats['resultSets'][0]['rowSet']:
        if season in row:
            seasonStats = row
            break  # first matching season row wins
    if seasonStats is None:
        # BUG FIX: original raised NameError (seasonStats unbound) when the
        # requested season was absent from the rowSet.
        self._stats = None
        return
    self._stats = Stats(
        Series(dict(zip(statsHeaders, seasonStats)), name=self._name))
def presenca(month, year):
    """Download the attendance ('presenca') XML for every day of the given
    month/year from the Camara web service, saving each day to
    presenca/<d>-<m>-<y>.xml.

    BUG FIX: the original looped ``range(1, 31)``, skipping day 31 of long
    months and requesting nonexistent dates (29/30) for February; the loop
    now uses the month's true length.  Also converted to print().
    """
    import calendar  # local import: compute the real number of days

    days_in_month = calendar.monthrange(year, month)[1]
    for dia in range(1, days_in_month + 1):
        data_string = str(dia) + '/' + str(month) + '/' + str(year)
        data_file = str(dia) + '-' + str(month) + '-' + str(year)
        url = ('http://www.camara.gov.br/sitcamaraws/SessoesReunioes.asmx/'
               'ListarPresencasDia?data=' + data_string +
               '&numMatriculaParlamentar=&parteNomeParlamentar=&siglaPartido=&siglaUF=')
        print(url)
        data = get_url(url)
        to_file("presenca/" + data_file + ".xml", data)
def test_simple_string_helpers(self):
    """Tests the simple helper functions as adding extension and route."""
    domain, route = "mydomain.com", "/route"
    named_csv = "myfile.csv"
    bare_name = "myfile"

    # URL building is plain concatenation of domain and route.
    self.assertEqual(h.get_url(domain, route), domain + route)
    # add_extension is a no-op when the extension is already present.
    self.assertEqual(h.add_extension(named_csv, ".csv"), named_csv)
    self.assertEqual(h.add_extension(bare_name, ".csv"), named_csv)
def test_invalid_target_page(self):
    """Tests if an invalid target url yields an invalid target page."""
    page_url = helpers.get_url(self.domain, "/ganhe-brindes")
    page = wp.WebPage(page_url)

    # Page metadata is still populated...
    self.assertTrue(page.title)
    self.assertTrue(page.domain)
    # ...but it must not be recognized as a valid target.
    self.assertFalse(page.valid_target)
    self.assertEqual(page.target_name, page._INVALID_TARGET)
def test_valid_target_page(self):
    """Tests if a valid target url yields a full target page."""
    page_url = helpers.get_url(
        self.domain, "/hypnose-eau-de-toilette-lancome-perfume-feminino/p")
    page = wp.WebPage(page_url)

    # A real product route yields full metadata and a valid target flag.
    self.assertTrue(page.title)
    self.assertTrue(page.domain)
    self.assertTrue(page.valid_target)
    self.assertEqual(page.domain, self.domain)
def get_url(self, field):
    """Build the public URL for the field's stored file.

    Uses the thumbnail filename when the field defines a thumbnail size,
    prefixes the optional relative path, and delegates to the module-level
    ``get_url`` with the field's endpoint.
    """
    filename = (field.thumbnail_fn(field.data)
                if field.thumbnail_size else field.data)
    if field.url_relative_path:
        filename = urljoin(field.url_relative_path, filename)
    return get_url(field.endpoint, filename=filename)
def __call__(self, field, **kwargs):
    """Render the hidden <input> backing a Select2 ajax widget.

    Populates data-* attributes (ajax url, placeholder, minimum input
    length) and, for multi-select fields, a separator-joined value plus a
    JSON payload of the formatted selections.
    """
    kwargs.setdefault('data-role', 'select2-ajax')
    kwargs.setdefault('data-url', get_url('.ajax_lookup', name=field.loader.name))

    allow_blank = getattr(field, 'allow_blank', False)
    if allow_blank and not self.multiple:
        kwargs['data-allow-blank'] = u'1'

    kwargs.setdefault('id', field.id)
    kwargs.setdefault('type', 'hidden')

    if self.multiple:
        result = []
        ids = []
        for value in field.data:
            data = field.loader.format(value)
            result.append(data)
            ids.append(as_unicode(data[0]))
        separator = getattr(field, 'separator', ',')
        kwargs['value'] = separator.join(ids)
        kwargs['data-json'] = json.dumps(result)
        kwargs['data-multiple'] = u'1'
    else:
        data = field.loader.format(field.data)
        if data:
            kwargs['value'] = data[0]
            kwargs['data-json'] = json.dumps(data)

    placeholder = field.loader.options.get('placeholder', 'Please select Model')
    kwargs.setdefault('data-placeholder', placeholder)

    minimum_input_length = int(
        field.loader.options.get('minimum_input_length', 1))
    kwargs.setdefault('data-minimum-input-length', minimum_input_length)

    # BUG FIX: original used '%S' (an invalid format specifier, raising
    # ValueError) and wrote `html_params(name=field.name, )**kwargs`, which
    # applies `**` as the power operator to the returned string instead of
    # passing the kwargs into the call.
    return Markup('<input %s>' % html_params(name=field.name, **kwargs))
#!/usr/local/bin/python import requests from pandas import Series from enum import Enum import helpers from constants import * from stats import Stats from stats import printStats playersDidLoadCorrectly = True players_url = helpers.get_url('commonallplayers', LeagueID=LEAGUE_ID, Season=CURRENT_SEASON, IsOnlyCurrentSeason=0) try: allPlayers = Series(requests.get(players_url, headers=USER_AGENT).json()) except ValueError as e: print 'Players could not be loaded.' print 'Error:', e.message print 'URL:', players_url playersDidLoadCorrectly = False if playersDidLoadCorrectly: headers = allPlayers['resultSets'][0]['headers'] def findPlayer(first, last):
#!/usr/local/bin/python import requests from pandas import Series from enum import Enum import helpers from constants import * from stats import Stats from stats import printStats playersDidLoadCorrectly = True players_url = helpers.get_url('commonallplayers', LeagueID=LEAGUE_ID, Season=CURRENT_SEASON, IsOnlyCurrentSeason=0) try: allPlayers = Series(requests.get(players_url, headers=USER_AGENT).json()) except ValueError as e: print 'Players could not be loaded.' print 'Error:', e.message print 'URL:', players_url playersDidLoadCorrectly = False if playersDidLoadCorrectly: headers = allPlayers['resultSets'][0]['headers'] def findPlayer(first, last): name = last.lower().strip() + ', ' + first.lower().strip() for player in allPlayers['resultSets'][0]['rowSet']: if name == player[DISPLAY_LAST_COMMA_FIRST].lower():