def test_GetPlatformMobygamesPlayStation(self):
    """Map the 'PlayStation' platform name to the MobyGames id "6".

    Renamed from test_GetPlatformMobygames: a later method in this class
    reused that exact name, so this definition was shadowed and never ran.
    """
    scraper = Mobygames_Scraper()
    platform = scraper.get_platform_for_scraper('PlayStation')
    self.assertEqual(
        platform, "6",
        "Did not get expected platform name for {0} scraper".format(
            scraper.name))
def test_parse_release_result_missingelements(self):
    """Parsing a release payload with missing optional elements must not raise.

    Loads a fixture that omits fields and runs it through
    _parse_release_result; the parser should still return a result.
    """
    f = os.path.join(os.path.dirname(__file__), '..', '..', 'resources',
                     'tests', 'testdata', 'scraper_web_responses',
                     'mobygames_getrelease_missingelements.json')
    scraper = Mobygames_Scraper()
    with open(f) as jsonfile:
        data = jsonfile.read()
    result = scraper._parse_release_result(json.loads(data))
    # print converted to call form (valid in Python 2 and 3)
    print("Release result is {0}".format(result))
    # The original test only printed and could never fail; assert at
    # minimum that parsing produced a value.
    self.assertIsNotNone(result)
def test_ErrorResponseAPIKeyExceeded(self):
    """A quota-exceeded status code raises ScraperExceededAPIQuoteException.

    Renamed from test_ErrorResponseAPIKeyExeeded (typo fix).
    """
    f = os.path.join(os.path.dirname(__file__), '..', '..', 'resources',
                     'tests', 'testdata', 'scraper_web_responses',
                     'mobygames_error_apikey_exceeded.json')
    scraper = Mobygames_Scraper()
    with open(f) as jsonfile:
        data = jsonfile.read()
    with self.assertRaises(ScraperExceededAPIQuoteException):
        scraper._check_status_code(json.loads(data)['status_code'])
def test_search_game_api_key_exceeded(self):
    """search() raises ScraperExceededAPIQuoteException on an HTTP 429 reply."""
    mocked_url = ('https://api.mobygames.com/v1/games?platform=6&format=brief'
                  '&api_key=FH9VxTkB6BGAEsF3qlnnxQ%3D%3D&title=WipEout XL')
    responses.add(
        responses.GET,
        mocked_url,
        json=self._loadJsonFromFile('mobygames_error_apikey_exceeded.json'),
        status=429)
    scraper = Mobygames_Scraper()
    with self.assertRaises(ScraperExceededAPIQuoteException):
        scraper.search('WipEout XL', 'PlayStation')
def test_search_game_exact_title(self):
    """search() with an exact-title mocked URL returns the matching game.

    Renamed from test_search_game: a later method in this class reused
    that exact name, so this definition was shadowed and never ran.
    """
    responses.add(
        responses.GET,
        'https://api.mobygames.com/v1/games?platform=6&format=brief&api_key=FH9VxTkB6BGAEsF3qlnnxQ%3D%3D&title=WipEout XL',
        json=self._loadJsonFromFile('mobygames_getgameslist.json'),
        status=200)
    scraper = Mobygames_Scraper()
    result = scraper.search('WipEout XL', 'PlayStation')
    # assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(result[0]['title'], 'WipEout XL')
def test_search_game(self):
    """search() returns the expected title from the mocked games-list reply."""
    responses.add(
        responses.GET,
        'https://api.mobygames.com/v1/games?platform=6&format=brief&api_key=FH9VxTkB6BGAEsF3qlnnxQ%3D%3D&title=WipEout',
        json=self._loadJsonFromFile('mobygames_getgameslist.json'),
        status=200)
    scraper = Mobygames_Scraper()
    result = scraper.search('WipEout XL', 'PlayStation')
    # assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(result[0]['title'], 'WipEout XL')
def test_parse_screenshots_result(self):
    """_parse_screenshots_result extracts the screenshot URL list from the fixture."""
    f = os.path.join(os.path.dirname(__file__), '..', '..', 'resources',
                     'tests', 'testdata', 'scraper_web_responses',
                     'mobygames_getscreenshots.json')
    scraper = Mobygames_Scraper()
    with open(f) as jsonfile:
        data = jsonfile.read()
    result = scraper._parse_screenshots_result(json.loads(data))
    # print converted to call form (valid in Python 2 and 3)
    print("Screenshots result is {0}".format(result))
    # assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(result['Filetypescreenshot'], [
        'http://www.mobygames.com/images/shots/l/436082-wipeout-xl-playstation-screenshot-wipeout-xl-title-screen.png'
    ])
def test_GamesListNoResults(self):
    """An empty MobyGames search response parses to an empty list."""
    fixture = os.path.join(os.path.dirname(__file__), '..', '..', 'resources',
                           'tests', 'testdata', 'scraper_web_responses',
                           'mobygames_getgameslist_noresults.json')
    scraper = Mobygames_Scraper()
    with open(fixture) as fh:
        payload = fh.read()
    results = scraper._parse_search_results(json.loads(payload))
    # Expect 0 results and an empty list
    self.assertIsInstance(
        results, list,
        "Expected search results to return list even if no results found")
    self.assertEqual(
        len(results), 0, "Empty search results should return empty list")
def test_parse_screenshots_result_empty(self):
    """_parse_screenshots_result returns an empty result for an empty fixture."""
    f = os.path.join(os.path.dirname(__file__), '..', '..', 'resources',
                     'tests', 'testdata', 'scraper_web_responses',
                     'mobygames_getscreenshots_empty.json')
    scraper = Mobygames_Scraper()
    with open(f) as jsonfile:
        data = jsonfile.read()
    result = scraper._parse_screenshots_result(json.loads(data))
    # print converted to call form (valid in Python 2 and 3)
    print("Screenshots result is {0}".format(result))
    # assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(len(result), 0, 'Result length expected to be 0')
def test_retrieve_game_playstation(self):
    """Full retrieve() flow for game 33250 on PlayStation over four mocked calls.

    Renamed from test_retrieve_game: a later method in this class reused
    that exact name, so this definition was shadowed and never ran.
    """
    # first call gets general game data
    responses.add(
        responses.GET,
        'https://api.mobygames.com/v1/games/33250?api_key=FH9VxTkB6BGAEsF3qlnnxQ%3D%3D',
        json=self._loadJsonFromFile('mobygames_getgame.json'),
        status=200)
    # second call gets platform specific release data
    responses.add(
        responses.GET,
        'https://api.mobygames.com/v1/games/33250/platforms/6?api_key=FH9VxTkB6BGAEsF3qlnnxQ%3D%3D',
        json=self._loadJsonFromFile(
            'mobygames_getrelease_missingelements.json'),
        status=200)
    # third call gets platform specific covers
    responses.add(
        responses.GET,
        'https://api.mobygames.com/v1/games/33250/platforms/6/covers?api_key=FH9VxTkB6BGAEsF3qlnnxQ%3D%3D',
        json=self._loadJsonFromFile('mobygames_getcovers.json'),
        status=200)
    # fourth call gets platform specific screenshots
    responses.add(
        responses.GET,
        'https://api.mobygames.com/v1/games/33250/platforms/6/screenshots?api_key=FH9VxTkB6BGAEsF3qlnnxQ%3D%3D',
        json=self._loadJsonFromFile('mobygames_getscreenshots.json'),
        status=200)
    scraper = Mobygames_Scraper()
    result = scraper.retrieve(33250, 'PlayStation')
    # assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(result['Game'],
                     ['WipEot 3 Special Edition / Destrction Derby 2'])
    self.assertEqual(result['ReleaseYear'], ['2003'])
    self.assertEqual(result['Genre'], ['Compilation'])
    self.assertEqual(result['Publisher'],
                     ['Sony Computer Entertainment Europe Ltd.'])
    self.assertEqual(result['Players'], ['1-2 Players'])
    self.assertEqual(result['Filetypeboxback'], [
        'http://www.mobygames.com/images/covers/l/175220-wipeout-xl-playstation-back-cover.png'
    ])
    self.assertEqual(result['Filetypescreenshot'], [
        'http://www.mobygames.com/images/shots/l/436082-wipeout-xl-playstation-screenshot-wipeout-xl-title-screen.png'
    ])
    self.assertEqual(result['Filetypeboxfront'], [
        'http://www.mobygames.com/images/covers/l/175218-wipeout-xl-playstation-front-cover.png'
    ])
    self.assertEqual(result['Filetypecartridge'], [
        'http://www.mobygames.com/images/covers/l/175219-wipeout-xl-playstation-media.png'
    ])
def test_parse_game_result_WithMultipleGenres(self):
    """_parse_game_result keeps only the "Basic Genres" from a multi-genre payload."""
    f = os.path.join(os.path.dirname(__file__), '..', '..', 'resources',
                     'tests', 'testdata', 'scraper_web_responses',
                     'mobygames_getgame_2genres.json')
    scraper = Mobygames_Scraper()
    with open(f) as jsonfile:
        data = jsonfile.read()
    result = scraper._parse_game_result(json.loads(data))
    # print converted to call form (valid in Python 2 and 3)
    print("Game result is {0}".format(result))
    # Genres - multiple genres, but only 2 "Basic Genres"
    # assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(len(result['Genre']), 2, "Expected 2 genres")
    self.assertIn("Racing / Driving", result['Genre'],
                  "Expected genre Racing / Driving to be retrieved")
    self.assertIn("Action", result['Genre'],
                  "Expected genre Action to be retrieved")
def test_parse_covers_result(self):
    """_parse_covers_result maps front/back/media scans to their artwork keys."""
    f = os.path.join(os.path.dirname(__file__), '..', '..', 'resources',
                     'tests', 'testdata', 'scraper_web_responses',
                     'mobygames_getcovers.json')
    scraper = Mobygames_Scraper()
    with open(f) as jsonfile:
        data = jsonfile.read()
    result = scraper._parse_covers_result(json.loads(data))
    # print converted to call form (valid in Python 2 and 3)
    print("Covers result is {0}".format(result))
    # assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(result['Filetypeboxfront'], [
        'http://www.mobygames.com/images/covers/l/175218-wipeout-xl-playstation-front-cover.png'
    ])
    self.assertEqual(result['Filetypeboxback'], [
        'http://www.mobygames.com/images/covers/l/175220-wipeout-xl-playstation-back-cover.png'
    ])
    self.assertEqual(result['Filetypecartridge'], [
        'http://www.mobygames.com/images/covers/l/175219-wipeout-xl-playstation-media.png'
    ])
def test_parse_release_result(self):
    """_parse_release_result extracts year, publisher, developer, players and controller."""
    f = os.path.join(os.path.dirname(__file__), '..', '..', 'resources',
                     'tests', 'testdata', 'scraper_web_responses',
                     'mobygames_getrelease.json')
    scraper = Mobygames_Scraper()
    with open(f) as jsonfile:
        data = jsonfile.read()
    result = scraper._parse_release_result(json.loads(data))
    # print converted to call form (valid in Python 2 and 3)
    print("Release result is {0}".format(result))
    # assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(result['ReleaseYear'], ['1994'])
    self.assertEqual(result['Publisher'], ['Interplay Productions, Inc.'])
    self.assertEqual(result['Developer'], ['Infogrames Europe SA'])
    self.assertEqual(result['Players'], ['1 Player'])
    self.assertEqual(result['Controller'], ['Controller Pad'])
def test_GamesListResults(self):
    """_parse_search_results parses a populated brief-format games-list fixture."""
    f = os.path.join(os.path.dirname(__file__), '..', '..', 'resources',
                     'tests', 'testdata', 'scraper_web_responses',
                     'mobygames_getgameslist.json')
    scraper = Mobygames_Scraper()
    with open(f) as jsonfile:
        data = jsonfile.read()
    results = scraper._parse_search_results(json.loads(data))
    # The fixture contains 5 results (the original comment claimed
    # "10 results per page", contradicting the assertion below)
    self.assertEqual(
        len(results), 5,
        "Number of search results for multiple search match did not match expected number")
    self.assertEqual(results[0]['title'], "WipEout XL",
                     "Incorrect title for first result")
    self.assertEqual(results[0]['id'], 3134,
                     "Incorrect game ID for first result")
    # MobyGames does not return release date in brief results
    self.assertEqual(results[0]['releaseDate'], "",
                     "Incorrect releaseDate for first result")
def test_parse_game_result(self):
    """_parse_game_result extracts the game title and basic genre."""
    f = os.path.join(os.path.dirname(__file__), '..', '..', 'resources',
                     'tests', 'testdata', 'scraper_web_responses',
                     'mobygames_getgame.json')
    scraper = Mobygames_Scraper()
    with open(f) as jsonfile:
        data = jsonfile.read()
    result = scraper._parse_game_result(json.loads(data))
    # print converted to call form (valid in Python 2 and 3)
    print("Game result is {0}".format(result))
    # assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(result['Game'],
                     ['WipEot 3 Special Edition / Destrction Derby 2'])
    # ReleaseYear is not asserted here: the release date for PlayStation
    # in this result set is %Y and its parsing moved to parse_release_data.
    # Genres
    self.assertEqual(result['Genre'], ['Compilation'])
def test_retrieve_game(self):
    """Full retrieve() flow for game 33250 on PlayStation over four mocked calls."""
    # first call gets general game data
    responses.add(
        responses.GET,
        'https://api.mobygames.com/v1/games/33250?api_key=FH9VxTkB6BGAEsF3qlnnxQ%3D%3D',
        json=self._loadJsonFromFile('mobygames_getgame.json'),
        status=200)
    # second call gets platform specific release data
    responses.add(
        responses.GET,
        'https://api.mobygames.com/v1/games/33250/platforms/6?api_key=FH9VxTkB6BGAEsF3qlnnxQ%3D%3D',
        json=self._loadJsonFromFile(
            'mobygames_getrelease_missingelements.json'),
        status=200)
    # third call gets platform specific covers
    responses.add(
        responses.GET,
        'https://api.mobygames.com/v1/games/33250/platforms/6/covers?api_key=FH9VxTkB6BGAEsF3qlnnxQ%3D%3D',
        json=self._loadJsonFromFile('mobygames_getcovers.json'),
        status=200)
    # fourth call gets platform specific screenshots
    responses.add(
        responses.GET,
        'https://api.mobygames.com/v1/games/33250/platforms/6/screenshots?api_key=FH9VxTkB6BGAEsF3qlnnxQ%3D%3D',
        json=self._loadJsonFromFile('mobygames_getscreenshots.json'),
        status=200)
    scraper = Mobygames_Scraper()
    result = scraper.retrieve(33250, 'PlayStation')
    # assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(result['Game'],
                     ['WipEot 3 Special Edition / Destrction Derby 2'])
    self.assertEqual(result['ReleaseYear'], ['2003'])
    self.assertEqual(result['Genre'], ['Compilation'])
    self.assertEqual(result['Publisher'],
                     ['Sony Computer Entertainment Europe Ltd.'])
    self.assertEqual(result['Players'], ['1-2 Players'])
    self.assertEqual(result['Filetypeboxback'], [
        'http://www.mobygames.com/images/covers/l/175220-wipeout-xl-playstation-back-cover.png'
    ])
    self.assertEqual(result['Filetypescreenshot'], [
        'http://www.mobygames.com/images/shots/l/436082-wipeout-xl-playstation-screenshot-wipeout-xl-title-screen.png'
    ])
    self.assertEqual(result['Filetypeboxfront'], [
        'http://www.mobygames.com/images/covers/l/175218-wipeout-xl-playstation-front-cover.png'
    ])
    self.assertEqual(result['Filetypecartridge'], [
        'http://www.mobygames.com/images/covers/l/175219-wipeout-xl-playstation-media.png'
    ])
def test_GetPlatformMobygames(self):
    """MobyGames resolves the 'PlayStation' platform name to id "6"."""
    scraper = Mobygames_Scraper()
    resolved = scraper.get_platform_for_scraper('PlayStation')
    failure_msg = "Did not get expected platform name for {0} scraper".format(
        scraper.name)
    self.assertEqual(resolved, "6", failure_msg)