@classmethod
async def _parse_spot_bait_metadata_average_time_to_catch(
        cls,
        spot_bait_metadata_map: Dict[int, SpotBaitMetadata],
        soup: BeautifulSoup
) -> List[SpotBaitMetadata]:
    # Each 'hooktime' cell pairs one bait with per-fish anchors whose 'rsec'
    # attribute holds the average seconds until that fish hooks.
    # noinspection SpellCheckingInspection
    for td in soup.find_all('td', {'class': 'hooktime'}):  # type: Tag
        bait_angler_id: int = int(
            non_number_replacement_regex.sub(
                repl='',
                string=td.find('a', {'class': 'clear_icon'}).attrs['href']))

        try:
            bait = spot_bait_metadata_map[bait_angler_id]
        except KeyError:
            # Blacklisted baits never made it into the map; skip them rather
            # than failing the whole parse.
            if bait_angler_id in angler_bait_blacklisted_bait_id:
                continue
            raise

        # noinspection SpellCheckingInspection
        for a_tag in td.find_all('a', {'rsec': True}):  # type: Tag
            fish_angler_id: int = int(
                non_number_replacement_regex.sub(
                    repl='', string=a_tag.attrs['href']))
            for fish_info in bait.spot_angler_bait_fish_catch_info:
                if fish_info.spot_fish_id.fish_angler_fish_id == fish_angler_id:
                    # noinspection SpellCheckingInspection
                    fish_info.spot_angler_fish_average_seconds_to_hook = int(
                        a_tag.attrs['rsec'])
                    break

    return list(spot_bait_metadata_map.values())
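# A hedged illustration of the markup the hooktime parser above expects; the
# HTML snippet and id values are invented for demonstration, only the
# selectors come from the code itself:
#
#     >>> from bs4 import BeautifulSoup
#     >>> td = BeautifulSoup(
#     ...     '<td class="hooktime"><a class="clear_icon" href="/bait/27"></a>'
#     ...     '<a rsec="42" href="/fish/3218"></a></td>',
#     ...     'html.parser').find('td')
#     >>> td.find('a', {'rsec': True}).attrs['rsec']
#     '42'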
@staticmethod
async def _parse_angler_bait_id(td2: Tag) -> int:
    # Prefer the anchor's href for the bait id; icon-only cells fall back to
    # the digits in the image's source path.
    a_tag = td2.find('a')
    if a_tag:
        return int(
            non_number_replacement_regex.sub(
                repl='', string=a_tag.attrs['href']))
    return int(
        non_number_replacement_regex.sub(
            repl='', string=td2.find('img').attrs['src']))
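# A minimal sketch of the digit extraction both branches rely on. The exact
# pattern of non_number_replacement_regex lives elsewhere in this repo; it is
# assumed here to strip every non-digit character:
#
#     >>> import re
#     >>> non_number_replacement_regex = re.compile(r'[^0-9]')
#     >>> non_number_replacement_regex.sub(repl='', string='/bait/view/1034')
#     '1034'
#     >>> non_number_replacement_regex.sub(repl='', string='img/1034.png')
#     '1034'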
@staticmethod
async def _parse_angler_gathering_spots(soup: BeautifulSoup) -> List[SpotId]:
    # Avoiding circular imports
    from ff14angler.dataClasses.spot.spotProvider import SpotProvider

    temp_fishing_spot_list: List[SpotId] = []
    spot_form = soup.find('form', {'name': 'spot_delete'})

    if spot_form:
        # noinspection SpellCheckingInspection
        body = spot_form.find_all('tbody')[1]
        for tag in body.find_all('tr'):  # type: Tag
            # Rows without a link are filler, not gathering spots.
            if not tag.find('a'):
                continue
            td1, _, td3 = tag.find_all('td')  # type: Tag, _, Tag
            spot_angler_spot_id: int = int(
                non_number_replacement_regex.sub(
                    repl='', string=td1.find('a').attrs['href']))
            temp_fishing_spot_list.append(
                await SpotProvider.get_spot_id_from_angler_id(
                    spot_angler_spot_id))

    return temp_fishing_spot_list
@staticmethod
async def _parse_angler_effective_bait_from_spot_soup(
        soup: BeautifulSoup) -> Dict[int, BaitId]:
    temp_bait_map: Dict[int, BaitId] = dict()
    table = soup.find('table', {'id': 'effective_bait'})

    for row_num, row in enumerate(table.find_all('tr')):  # type: int, Tag
        # The first row is the header; every later row starts with a bait icon.
        if row_num == 0:
            continue
        img_holder = row.select('.clear_icon')
        if img_holder:
            bait_angler_id: int = int(
                non_number_replacement_regex.sub(
                    repl='', string=img_holder[0].find('img').attrs['src']))
            try:
                bait = BaitProvider.bait_holder[bait_angler_id]
            except KeyError:
                # A bait missing from the holder is assumed to be a mooch
                # fish; register it before recording its id.
                bait_angler_name: str = img_holder[0].attrs['title']
                bait = await BaitProvider.get_bait_from_angler_bait(
                    bait_angler_id, bait_angler_name)
                await bait.update_bait_with_assume_is_mooch_fish()
            temp_bait_map[bait.bait_id.bait_angler_bait_id] = bait.bait_id

    return temp_bait_map
@staticmethod
async def _parse_angler_available_fish_from_spot_soup(
        soup: BeautifulSoup) -> List[FishId]:
    temp_fish_list: List[FishId] = []
    form = soup.find('form', {'name': 'spot_delete'})
    # noinspection SpellCheckingInspection
    body = form.find_all('tbody')[1]

    for tag in body.find_all('tr'):  # type: Tag
        tds = tag.find_all('td')
        td2: Tag = tds[1]
        td4: Tag = tds[3]
        fish_angler_id: int = int(
            non_number_replacement_regex.sub(
                repl='', string=td2.find('a').attrs['href']))
        fish_angler_name: str = td2.text.strip()
        fish = await FishProvider.get_fish_from_angler_fish(
            fish_angler_id, fish_angler_name)

        # Tug strength arrives as JSON in the canvas tag's data-value; rows
        # without a canvas fall back to an empty object.
        tug_canvas = td4.find('canvas')
        if tug_canvas:
            canvas_data: str = tug_canvas.attrs['data-value']
        else:
            canvas_data = '{}'

        temp_fish_list.append(fish.fish_id)
        await fish.update_fish_with_tug_strength(json.loads(canvas_data))

    return temp_fish_list
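# The shape of the tug-strength JSON is an assumption here (some mapping of
# tug type to observation count); whatever it is, the '{}' fallback above
# guarantees update_fish_with_tug_strength always receives a dict:
#
#     >>> import json
#     >>> json.loads('{"1": 12, "3": 4}')  # hypothetical data-value payload
#     {'1': 12, '3': 4}
#     >>> json.loads('{}')
#     {}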
@classmethod
async def _get_request_metadata_from_web_driver(
        cls, driver: WebDriver) -> Tuple[int, int, int, int]:
    response: Tuple[int, str, str, str] = driver.execute_script(
        comment_metadata_javascript)

    request_id: int = response[0]
    type_id: int = int(response[1])
    item_id: int = int(response[2])
    max_comments: int = int(
        non_number_replacement_regex.sub(repl='', string=response[3]) or '0')

    return request_id, type_id, item_id, max_comments
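# Why the `or '0'` guard above matters: if the scraped comment counter holds
# no digits, the substitution yields an empty string and int('') would raise
# ValueError. Regex pattern assumed, as before:
#
#     >>> import re
#     >>> non_number_replacement_regex = re.compile(r'[^0-9]')
#     >>> non_number_replacement_regex.sub(repl='', string='n/a') or '0'
#     '0'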
@staticmethod
async def _check_if_is_spearfishing_spot(soup: BeautifulSoup) -> bool:
    for tag in soup.find(
            'table', {'id': 'effective_bait'}).find_all('tr'):  # type: Tag
        bait_span = tag.find('span', {'class': 'clear_icon'})
        if bait_span:
            bait_id: str = non_number_replacement_regex.sub(
                repl='', string=bait_span.find('img').attrs['src'])
            if bait_id in {'2001', '2002', '2003'}:
                return True
    return False
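# The three hard-coded ids presumably correspond to the spearfishing gig
# sizes on the source site (an assumption based on this check's name, not
# confirmed elsewhere in this section). Note the comparison stays on strings,
# so the regex output is used without an int() round-trip:
#
#     >>> import re
#     >>> non_number_replacement_regex = re.compile(r'[^0-9]')  # assumed
#     >>> non_number_replacement_regex.sub(repl='', string='img/2002.png')
#     '2002'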
@staticmethod
async def _get_alt_currency_prices(
        special_shops: Optional[Dict[str, List[int]]]
) -> List[BaitAltCurrency]:
    # A set deduplicates identical (currency id, currency name, cost) rows
    # coming from multiple shops.
    shop_holder: Set[Tuple[int, str, int]] = set()

    if special_shops is not None:
        for shop_item_label, shop_list in special_shops.items():
            shop_response = await XivapiWrapper.xivapi_special_shop_lookup(
                shop_list[0])
            shop_item_num: str = non_number_replacement_regex.sub(
                repl='', string=shop_item_label)
            shop_holder.add((
                shop_response[f'ItemCost{shop_item_num}TargetID'],
                shop_response[f'ItemCost{shop_item_num}']['Name_en'],
                shop_response[f'CountCost{shop_item_num}'],
            ))

    return [BaitAltCurrency(*shop) for shop in shop_holder]
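# A sketch of how the shop label's digits select the XIVAPI columns above.
# The label value 'Item2' is hypothetical:
#
#     >>> import re
#     >>> non_number_replacement_regex = re.compile(r'[^0-9]')  # assumed
#     >>> num = non_number_replacement_regex.sub(repl='', string='Item2')
#     >>> (f'ItemCost{num}TargetID', f'CountCost{num}')
#     ('ItemCost2TargetID', 'CountCost2')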
async def update_spot_with_spot_soup(self, soup: BeautifulSoup) -> 'Spot':
    spot_info: Tag = soup.find('table', {'class': 'spot_info'})

    self.spot_angler_area_id = await self._parse_angler_area_id(spot_info)
    self.spot_angler_catch_metadata = await SpotCatchMetadata.get_spot_catch_metadata_from_spot_soup(
        soup)
    self.spot_angler_fishers_intuition_comment = await self._parse_angler_fishers_intuition_comment(
        spot_info)
    self.spot_angler_gathering_level = int(
        non_number_replacement_regex.sub(
            repl='',
            string=spot_info.find('span', {'class': 'level'}).text))
    self.spot_angler_x_coord = await self._parse_angler_x_coord(spot_info)
    self.spot_angler_y_coord = await self._parse_angler_y_coord(spot_info)

    if await self._check_if_is_spearfishing_spot(soup):
        await self.update_spot_with_assume_is_spearfishing_spot()
    else:
        await self.update_spot_with_assume_is_fishing_spot()

    return self
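# A hedged usage sketch: given raw spot-page HTML, the update call fills the
# scalar fields and then dispatches to the fishing/spearfishing branch. The
# `spot_page_html` name and surrounding setup are illustrative, not taken
# from this repo:
#
#     soup = BeautifulSoup(spot_page_html, 'html.parser')
#     spot = await spot.update_spot_with_spot_soup(soup)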
@classmethod
async def _parse_spot_bait_metadata(
        cls,
        available_fish: List[FishId],
        effective_bait: Dict[int, BaitId],
        soup: BeautifulSoup
) -> List[SpotBaitMetadata]:
    spot_bait_metadata_map: Dict[int, SpotBaitMetadata] = dict()
    table = soup.find('table', {'id': 'effective_bait'})

    for row_num, row in enumerate(table.find_all('tr')):  # type: int, Tag
        if row_num == 0:
            continue
        for cell_num, cell in enumerate(row.find_all('td')):
            if cell_num == 0:
                # The first cell names the bait; later cells hold per-fish
                # catch rates.
                bait_img = row.select('.clear_icon')[0].find('img')
                bait_angler_id: int = int(
                    non_number_replacement_regex.sub(
                        repl='', string=bait_img.attrs['src']))
                bait_metadata = SpotBaitMetadata(effective_bait[bait_angler_id])
                spot_bait_metadata_map[bait_angler_id] = bait_metadata
                continue

            fish_rate = cell.find('div', {'class': 'fish_rate clear_icon'})
            if fish_rate:
                if float(fish_rate.find('canvas').attrs['value']) <= 0:
                    continue
                data: str = fish_rate.attrs['title'].split()[-1]
                caught_count, caught_total = await cls._parse_caught_count_caught_total(
                    data)
                caught_percent: str = data.replace(
                    f'({caught_count}/{caught_total})', '').strip()
                # noinspection PyUnboundLocalVariable
                bait_metadata.update_spot_bait_metadata_with_spot_bait_fish_caught(
                    caught_count,
                    caught_percent,
                    caught_total,
                    available_fish[cell_num - 1])

    return await cls._parse_spot_bait_metadata_average_time_to_catch(
        spot_bait_metadata_map, soup)
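# The rate cell's title is assumed to end in a 'percent(count/total)' token,
# e.g. 'Versatile Lure 12.34%(12/345)' (an invented value). Splitting on
# whitespace keeps the numeric token, and stripping the '(count/total)'
# suffix leaves the percent string:
#
#     >>> data = 'Versatile Lure 12.34%(12/345)'.split()[-1]
#     >>> data
#     '12.34%(12/345)'
#     >>> data.replace('(12/345)', '').strip()
#     '12.34%'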
@classmethod
async def get_leve_from_soup(cls, soup: Tag) -> 'FishLeve':
    tds = soup.find_all('td')
    td1: Tag = tds[0]
    td3: Tag = tds[2]
    td4: Tag = tds[3]

    angler_leve_name_jp: str = td1.find('font').text.strip()
    angler_leve_name: str = await cls._parse_leve_name(td1)
    search_responses = await XivapiWrapper.xivapi_leve_search(angler_leve_name)

    for search_response in search_responses:
        lookup_response = await XivapiWrapper.xivapi_leve_lookup(
            search_response['ID'])
        # Only hits with a CraftLeve payload carry the turn-in item data
        # used below; skip the rest.
        if lookup_response['CraftLeve']:
            return cls(
                leve_angler_fish_id=int(
                    td3.find('a').attrs['href'].split('/')[-1]),
                leve_angler_fish_name=td3.text.strip(),
                leve_angler_name=angler_leve_name,
                leve_angler_name_jp=angler_leve_name_jp,
                leve_angler_turn_in_count=int(
                    non_number_replacement_regex.sub(repl='', string=td4.text)),
                leve_name_de=lookup_response['Name_de'],
                leve_name_en=lookup_response['Name_en'],
                leve_name_fr=lookup_response['Name_fr'],
                leve_name_ja=lookup_response['Name_ja'],
                leve_id=lookup_response['ID'],
                leve_item_id=lookup_response['CraftLeve']['Item0']['ID'],
                leve_item_name=lookup_response['CraftLeve']['Item0']['Name_en'],
                leve_level=lookup_response['ClassJobLevel'])

    raise ValueError(
        f'Could not find applicable leve for leve name: {angler_leve_name}')