def write_dishes(new_dishes):
    """Serialize *new_dishes* to ../data/dishes.json (pretty-printed JSON).

    On any failure the exception is reported via prLightOrange instead of
    propagating (matches the file's best-effort error-handling style).
    """
    try:
        # 'with' closes the handle even if the dump fails (the original
        # opened the file and never closed it)
        with open('../data/dishes.json', 'w') as dishes_file:
            dishes_file.write(json.dumps(new_dishes, indent=4))
        prGreen('Data saved')
    except Exception as e:
        prLightOrange(e)
def read_dishes():
    """Load ../data/dishes.json and return its 'dishes' entry.

    Returns None (after reporting the error) when the file is missing,
    unreadable, or lacks a 'dishes' key.
    """
    try:
        # Context manager fixes the leaked file handle of the original
        with open('../data/dishes.json', 'r') as dishes_file:
            return json.load(dishes_file)['dishes']
    except Exception as e:
        prLightOrange(e)
        return None
def order_alphabetically(dic):
    """Return a new dict holding *dic*'s entries, keys ordered case-insensitively.

    On any error the exception is reported via prLightOrange and None is
    returned implicitly (same contract as the rest of this module).
    """
    try:
        # Insertion order of a dict is preserved, so building from the
        # sorted key sequence yields an alphabetically ordered mapping.
        return {name: dic[name] for name in sorted(dic, key=lambda x: x.lower())}
    except Exception as e:
        prLightOrange(e)
def get_game_name(url):
    """Scrape the game title (the h2.pdp__title element) from *url*.

    Returns the title text, or None when the page could not be fetched or
    the title element is absent.
    """
    response = get_resource(url)
    try:
        markup = BeautifulSoup(response.content, 'html.parser')
        title_tag = markup.find('h2', class_='pdp__title')
        return title_tag.text
    except AttributeError:
        # Raised when response is None (.content) or find() returned no tag (.text)
        prLightOrange(f"Wrong url: {url}")
        return None
def read_prices():
    """Return the cached price table parsed from DATA_PATH/prices.json.

    Returns None when the cache file does not exist yet (user is told to
    download data first) or on any other read/parse error.
    """
    prices = None
    try:
        # 'with' closes the handle; the original leaked it
        with open(DATA_PATH + 'prices.json', 'r') as prices_file:
            prices = json.load(prices_file)
    except FileNotFoundError:
        prLightOrange(f"Data needs to be downloaded first")
    except Exception as e:
        prLightOrange(e)
    return prices
def search_game(game):
    """Search the PS Store for *game* and return the matching game URL.

    Returns the last href found in the first result cell, or '' when the
    game cannot be found.
    """
    search = get_resource(PSSTR_SEARCH + game)
    game_url = ''
    try:
        soup = BeautifulSoup(search.content, 'html.parser')
        game_field = soup.find('div', class_='grid-cell grid-cell--game')
        # Keep the last anchor's href, mirroring the original iteration order
        for ref in game_field.find_all('a', href=True):
            game_url = ref['href']
    except AttributeError:
        # search was None or no result cell exists on the page
        prLightOrange(f"Game: {game} couldn't be found")
    # BUGFIX: the original returned from a 'finally' block, which silently
    # swallowed every exception other than AttributeError (even
    # KeyboardInterrupt). Returning here preserves the normal paths while
    # letting unexpected errors propagate.
    return game_url
def get_resource(url):
    """Perform an HTTP GET on *url*.

    Returns the Response on a successful status, otherwise reports the
    problem and returns None.
    """
    try:
        response = requests.get(url)
    except Exception as e:
        prRed('Error getting resource')
        prLightOrange(e)
        return None
    if response.ok:
        return response
    # Non-success status: report and signal failure to the caller
    prRed(f"Error getting resource: {url}")
    prLightOrange(f"Response code: {response}")
    return None
def save_prices():
    """Fetch prices for every game in GAMES concurrently and persist them.

    Each entry of the result maps a game slug to its per-store prices
    ('ps', 'st', 'ep'), alphabetized and written to DATA_PATH/prices.json.
    """
    res = {}
    games = list(GAMES.keys())
    # Shuffle so the stores are not always hit in the same order
    random.shuffle(games)
    with ThreadPool(len(games)) as pool:
        prices = pool.map(save_price, games)
    for i, game in enumerate(games):
        res[game] = {
            'ps': prices[i][0],
            'st': prices[i][1],
            'ep': prices[i][2]
        }
    try:
        # 'w' truncates/creates as needed; 'with' closes the handle (the
        # original opened with 'w+' and never closed it, and also shadowed
        # the 'prices' list with the file object)
        with open(DATA_PATH + 'prices.json', 'w') as prices_file:
            prices_file.write(json.dumps(order_alphabetically(res), indent=4))
        prGreen(f"Data was successfully saved")
    except Exception as e:
        prLightOrange(e)
def display_prices():
    """Render the cached price table (DATA_PATH/prices.json) as an
    ANSI-colored console table.

    Each store cell shows either a plain price, '-----' when no price is
    known, or 'old -> new (Until: date)' when the cached entry is a
    [original, final, countdown] discount triple.
    """
    # Bind the ruler once so both prints are guaranteed identical
    ruler = '------------------------------------------------------------------------------------------------------------------------------------------------------------'
    try:
        # Context manager closes the file (the original leaked the handle)
        with open(DATA_PATH + 'prices.json', 'r') as prices_file:
            prices = json.load(prices_file)
        print(ruler)
        print(
            f"\033[97m | Videogame                     | Playstation Store                            | Steam                                          | Epic Games Store                              |\033[00m"
        )
        print(ruler)
        for game in prices:
            # 'some-game-slug' -> 'Some Game Slug'
            name = ' '.join([n.capitalize() for n in game.split('-')])
            ps_str = ''
            st_str = ''
            while len(name) < 30:
                name += ' '
            # Playstation Store column: a list means the game is discounted
            if isinstance(prices[game]['ps'], list):
                original_ps = prices[game]['ps'][0]
                final_ps = prices[game]['ps'][1]
                countdown_ps = prices[game]['ps'][2]
                ps_str = f"\033[91m{original_ps}\033[00m \033[97m->\033[00m \033[92m{final_ps}\033[00m (Until: {countdown_ps})"
                # Pad targets count the ANSI escape characters, hence the
                # larger width for discounted cells
                while len(ps_str) < 65:
                    ps_str += ' '
            else:
                price = prices[game]['ps']
                if not price:
                    price = '-----'
                ps_str = f"\033[97m {price}\033[00m"
                while len(ps_str) < 45:
                    ps_str += ' '
            # Steam column
            if isinstance(prices[game]['st'], list):
                original_st = prices[game]['st'][0]
                final_st = prices[game]['st'][1]
                countdown_st = prices[game]['st'][2]
                st_str = f"\033[97m|\033[91m {original_st}\033[00m \033[97m->\033[00m \033[92m{final_st}\033[00m (Until: {countdown_st})"
                while len(st_str) < 73:
                    st_str += ' '
            else:
                price = prices[game]['st']
                if not price:
                    price = '-----'
                st_str = f"\033[97m| {price}\033[00m"
                while len(st_str) < 48:
                    st_str += ' '
            # Epic Games Store column
            if isinstance(prices[game]['ep'], list):
                original_ep = prices[game]['ep'][0]
                final_ep = prices[game]['ep'][1]
                countdown_ep = prices[game]['ep'][2]
                ep_str = f"\033[97m|\033[91m {original_ep}\033[00m \033[97m->\033[00m \033[92m{final_ep}\033[00m (Until: {countdown_ep}) \033[97m |\033[00m"
                while len(ep_str) < 73:
                    ep_str += ' '
            else:
                price = prices[game]['ep']
                if not price:
                    price = '-----'
                ep_str = f"\033[97m| {price} |\033[00m"
                while len(ep_str) < 47:
                    ep_str += ' '
            print(
                f"\033[97m -> {name} | \033[00m {ps_str} {st_str} {ep_str}")
    except FileNotFoundError:
        prLightOrange(f"Data needs to be downloaded first")
    except Exception as e:
        prLightOrange(e)
def _parse_ep_prices(prices, caption_scope):
    """Turn a list of Epic 'Price' nodes into the module's price value.

    Returns the plain price string when *prices* holds a single node, or a
    (original, final, 'DD-Mon') tuple when the game is discounted, reading
    the countdown date from the PurchaseCaption message under
    *caption_scope*.
    """
    if len(prices) < 2:  # Not discounted: a single Price node
        return prices[0].text[:-2].replace(',', '.')
    countdown_section = caption_scope.find(
        'div', attrs={
            'data-component': 'PurchaseCaption'
        }).find('span', attrs={'data-component': 'Message'})
    # Caption's 5th word looks like 'DD/MM/...' — TODO confirm against a live page
    countdown_value = countdown_section.text.split()[4].split('/')
    countdown_month = MONTHS.get(int(countdown_value[1]), None)
    return (prices[0].text[:-2].replace(',', '.'),
            prices[1].text[:-2].replace(',', '.'),
            countdown_value[0] + '-' + countdown_month)


def get_game_price_ep(url):
    """Scrape the Epic Games Store price for *url* with headless Chrome.

    Returns a plain price string, a (original, final, countdown) tuple when
    discounted, or None when *url* is falsy or scraping fails.
    """
    if not url:
        return None
    # Chrome options: spoof a desktop user agent, headless + incognito
    chrome_options = webdriver.ChromeOptions()
    user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36'
    chrome_options.add_argument(f'user-agent={user_agent}')
    chrome_options.add_argument('--headless')
    chrome_options.add_argument('--incognito')
    driver = webdriver.Chrome(CHROMEDRIVER_PATH, options=chrome_options)
    try:
        driver.get(url)
        # Wait until the price container has been rendered by the page's JS
        price_section = EC.presence_of_element_located(
            (By.CLASS_NAME, 'css-r6gfjb-PurchasePrice__priceContainer'))
        WebDriverWait(driver, 10).until(price_section)
        # Hand the rendered HTML to BeautifulSoup for parsing
        soup = BeautifulSoup(
            driver.page_source.encode('utf-8').strip(), 'html.parser')
        # Price may appear in the top purchase section...
        price_top_section = soup.find('div', class_='css-4tpn3e')
        if price_top_section:
            prices = price_top_section.findAll(
                attrs={'data-component': 'Price'})
            return _parse_ep_prices(prices, soup)
        # ...or inside the first ProductCard further down the page
        products = soup.findAll('div', attrs={'data-component': 'ProductCard'})
        price_section = products[0].find(
            'div', attrs={'data-component': 'PriceLayout'})
        prices = price_section.findAll(attrs={'data-component': 'Price'})
        return _parse_ep_prices(prices, price_section)
    except AttributeError:
        prRed(f"ERROR: AttributeError while scraping {url}")
        return None
    except TimeoutException:
        prLightOrange("WARNING: Timed out waiting for prices to load")
        return None
    except IndexError:
        prLightOrange(
            f'WARNING: Race condition occurred while getting price from {url}')
        return None
    finally:
        # BUGFIX: the original only quit the driver on the happy path,
        # leaking a Chrome process on every timeout/parse error
        driver.quit()