class Crawler:
    """Fetch a Tibia character page, parse it, and persist the result as JSON."""

    def __init__(self, tibia_url, character, downloader):
        self.tibia_url = tibia_url
        self.downloader = downloader  # fix: attribute was misspelled "downlaoder"
        self.parser = Parser()
        # NOTE: the constructor eagerly performs the network fetch.
        self.get_tibia_information(character)

    def get_tibia_information(self, character):
        """POST the character-search form, parse the response, and save it.

        Returns the parsed character object, or None when the page reports
        the character as missing.
        """
        character_url = urljoin(self.tibia_url, "community/?subtopic=characters")
        params = self.config_params(character)
        response = self.downloader.post(character_url, data=params)
        # NOTE(review): per the test suite, character_not_found() is falsy for
        # a "not found" page, so a truthy value means the character exists.
        if self.parser.character_not_found(response.text):
            parsed = self.parser.parse(response.text)
            self.save_data(parsed.__dict__)
            return parsed
        # Fix: `parsed` was previously referenced unconditionally after the
        # branch, raising NameError whenever the character was missing.
        return None

    def config_params(self, character):
        """Build the form fields expected by tibia.com's character search."""
        return {"name": character, "Submit.x": 0, "Submit.y": 0}

    def save_data(self, data):
        """Write the character dict to tibia/database/<name>.json."""
        name = data.get("name")
        with open(f"tibia/database/{name}.json", "w", encoding="utf-8") as f:
            json.dump(data, f)
        print("Personagem salvo com sucesso!")
def __init__(self, tibia_url, character, downloader):
    """Store collaborators, then immediately fetch the character's data."""
    self.parser = Parser()
    self.tibia_url = tibia_url
    self.downlaoder = downloader  # sic: attribute name kept as-is for compatibility
    # Eager fetch: construction triggers the network request.
    self.get_tibia_information(character)
def testNotFound(snapshot, notFoundHtml):
    """The parser reports falsy for the 'character not found' fixture page."""
    confirmation = Parser().characterNotFound(notFoundHtml)
    # Fix: was `assert not confirmation == True` (PEP 8 E712, parses as
    # `not (confirmation == True)`); assert falsiness directly.
    assert not confirmation
def testExtractAccountStatus(snapshot, resumeHtml):
    """Snapshot-test Parser.extract_account_status on the resume fixture."""
    soup = BeautifulSoup(resumeHtml, "html.parser")
    snapshot.assert_match(Parser().extract_account_status(soup))
def testExtractDeaths(snapshot, resumeHtml):
    """Snapshot every death entry extracted from the resume fixture."""
    soup = BeautifulSoup(resumeHtml, "html.parser")
    for entry in Parser().extract_deaths(soup):
        snapshot.assert_match(entry)
def testExtractGuildMembership(snapshot, resumeHtml):
    """Snapshot-test Parser.extract_guild_membership on the resume fixture."""
    soup = BeautifulSoup(resumeHtml, "html.parser")
    snapshot.assert_match(Parser().extract_guild_membership(soup))
def testExtractLastLogin(snapshot, resumeHtml):
    """Snapshot-test Parser.extract_last_login on the resume fixture."""
    soup = BeautifulSoup(resumeHtml, "html.parser")
    snapshot.assert_match(Parser().extract_last_login(soup))
def testExtractResidence(snapshot, resumeHtml):
    """Snapshot-test Parser.extract_residence on the resume fixture."""
    soup = BeautifulSoup(resumeHtml, "html.parser")
    snapshot.assert_match(Parser().extract_residence(soup))
def testExtractAchivements(snapshot, resumeHtml):
    """Snapshot-test Parser.extract_achivement on the resume fixture.

    NOTE(review): "achivement" misspelling is part of the Parser API and the
    test's public name; renaming would break the contract, so it is kept.
    """
    soup = BeautifulSoup(resumeHtml, "html.parser")
    snapshot.assert_match(Parser().extract_achivement(soup))
def testExtractVocation(snapshot, resumeHtml):
    """Snapshot-test Parser.extract_vocation on the resume fixture."""
    soup = BeautifulSoup(resumeHtml, "html.parser")
    snapshot.assert_match(Parser().extract_vocation(soup))
def test_not_found(snapshot, not_found_html):
    """The parser reports falsy for the 'character not found' fixture page."""
    confirmation = Parser().character_not_found(not_found_html)
    # Fix: was `assert not confirmation == True` (PEP 8 E712, parses as
    # `not (confirmation == True)`); assert falsiness directly.
    assert not confirmation
def test_extract_level(snapshot, resume_html):
    """Snapshot-test Parser.extract_level on the resume fixture."""
    soup = BeautifulSoup(resume_html, "html.parser")
    snapshot.assert_match(Parser().extract_level(soup))