import requests


def get_data(data):
    """Fetch current weather for the coordinates nested in ``data``.

    Returns the decoded JSON payload, or None on a non-200 response.
    """
    lat = data['location']['lat']
    lng = data['location']['lng']
    url = BASE_URL.format(lat, lng, API_KEY)
    r = requests.get(url)
    weather_data = None
    if r.status_code == 200:
        weather_data = r.json()
    return weather_data
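
# A minimal usage sketch for get_data, assuming placeholder values for the
# module constants it reads; the URL template and API key below are
# illustrative stand-ins, not the original project's configuration.
BASE_URL = 'https://api.example.com/weather?lat={}&lng={}&key={}'  # assumed template
API_KEY = 'your-api-key'                                           # placeholder

place = {'location': {'lat': 52.52, 'lng': 13.405}}  # hypothetical payload
weather = get_data(place)
print(weather if weather is not None else 'request failed (non-200 status)')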
def _format_url(week, season, position):
    """Build the NFL API URL for weekly or season-long stats."""
    stats_type = 'weekStats'  # renamed from `type` to avoid shadowing the builtin
    if not season:
        season = _get_default_season()
    if not week:
        # No week given: fall back to season-long stats.
        stats_type = 'seasonStats'
        week_string = ''
    else:
        week_string = '&week={}'.format(week)
    nfl_api_url = BASE_URL.format(stats_type, season, week_string)
    if position in VALID_POSITIONS:
        nfl_api_url += '&position=' + position
    return nfl_api_url
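
# A usage sketch for _format_url under assumed stand-ins for the constants
# and helper it depends on; BASE_URL, VALID_POSITIONS, and _get_default_season
# below are hypothetical, shaped only by how the function uses them.
BASE_URL = 'https://api.example.com/stats?statType={}&season={}{}'  # assumed
VALID_POSITIONS = {'QB', 'RB', 'WR', 'TE', 'K', 'DEF'}              # assumed

def _get_default_season():
    return 2023  # stand-in for the real helper, which is not shown above

print(_format_url(week=5, season=None, position='QB'))
# weekly stats for week 5 of the default season, filtered to quarterbacks
print(_format_url(week=None, season=2022, position='XX'))
# season-long stats; the unrecognized position is silently dropped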
import re
from urllib.request import urlopen

from bs4 import BeautifulSoup


def get_all_magazines_links_to_download() -> dict:
    """Collect (href, filename) download pairs for every sheet page."""
    magazines = {}
    for num_sheet in range(1, NUM_SHEETS + 1):
        url = BASE_URL.format(num_sheet=num_sheet)
        response = urlopen(url)
        data = response.read()
        soup = BeautifulSoup(data, features="lxml")
        all_elems = soup.find_all('a', href=re.compile(LINK_REGEX_HREF_CONTENT))
        if not all_elems:
            # No matching links on this sheet; skip it rather than crash below.
            continue
        # The first link is named after the last segment of its URL...
        href = all_elems[0].attrs['href']
        fname = href.split('/')[-1]
        magazines[num_sheet] = [(href, fname)]
        # ...while the remaining links are named after their anchor text.
        for a_elem in all_elems[1:]:
            href = a_elem.attrs['href']
            fname = a_elem.text + '.pdf'
            magazines[num_sheet].append((href, fname))
    return magazines
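
# A usage sketch for the scraper; NUM_SHEETS, BASE_URL, and
# LINK_REGEX_HREF_CONTENT are assumed placeholders matching how the function
# uses them, not the real site's values.
NUM_SHEETS = 3                                                # assumed
BASE_URL = 'https://example.com/magazines?sheet={num_sheet}'  # assumed
LINK_REGEX_HREF_CONTENT = r'\.pdf$'                           # assumed: match PDF links

for sheet, pairs in get_all_magazines_links_to_download().items():
    for href, fname in pairs:
        print(sheet, fname, href)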
import requests


def test_post_endpoint(endpoint_name: str):
    """POST to the named endpoint and return the raw Response object."""
    response = requests.post(BASE_URL.format(endpoint_name=endpoint_name))
    return response


def test_get_endpoint(endpoint_name: str):
    """GET the named endpoint and return its decoded JSON body."""
    response = requests.get(BASE_URL.format(endpoint_name=endpoint_name))
    return response.json()


def test_delete_endpoint(endpoint_name: str):
    """DELETE the named endpoint and return its decoded JSON body."""
    response = requests.delete(BASE_URL.format(endpoint_name=endpoint_name))
    return response.json()
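
# A usage sketch; BASE_URL is an assumed template and 'todos' a hypothetical
# endpoint name. Note the asymmetry: the POST helper returns the raw Response
# (so its status can be checked), while GET and DELETE return decoded JSON.
BASE_URL = 'https://api.example.com/{endpoint_name}'  # assumed

created = test_post_endpoint('todos')
print(created.status_code)
print(test_get_endpoint('todos'))
print(test_delete_endpoint('todos'))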