def test_get_dictionary(self):
    client = GoogleSearchResults({"q": "Coffee", "location": "Austin,Texas"})
    data = client.get_dictionary()
    self.assertIsNotNone(data.get('local_results'))
def test_get_html(self):
    client = GoogleSearchResults({"q": "Coffee", "location": "Austin,Texas"})
    data = client.get_html()
    self.assertGreater(len(data), 10)
def test_get_dict(self):
    search = GoogleSearchResults({"q": "Coffee", "location": "Austin,Texas"})
    data = search.get_dict()
    self.assertIsNotNone(data.get('local_results'))
def test_get_json_google_scholar(self):
    # renamed to avoid clashing with test_get_json below
    client = GoogleSearchResults({"q": "Coffee", "engine": "google_scholar"})
    data = client.get_json()
    self.assertIsNotNone(data["organic_results"][0]["title"])
def test_search_google_images(self):
    search = GoogleSearchResults({"q": "coffee", "tbm": "isch"})
    for image_result in search.get_json()['images_results']:
        try:
            link = image_result["original"]
            print("link is found: " + link)
            # uncomment the line below to download the original image
            # wget.download(link, '.')
        except KeyError:
            print("link is not found.")
def test_get_json(self):
    client = GoogleSearchResults({"q": "Coffee", "location": "Austin,Texas"})
    data = client.get_json()
    self.assertEqual(data["search_metadata"]["status"], "Success")
    self.assertIsNotNone(data["search_metadata"]["google_url"])
    self.assertIsNotNone(data["search_metadata"]["id"])
    # pp = pprint.PrettyPrinter(indent=2)
    # pp.pprint(data['local_results'])
    self.assertIsNotNone(data['local_results']['places'][0])
def test_search_google_shopping(self):
    search = GoogleSearchResults({
        "q": "coffee",        # search query
        "tbm": "shop",        # google shopping
        "tbs": "p_ord:rv",    # sort by review
        "num": 100
    })
    data = search.get_json()
    for shopping_result in data['shopping_results']:
        print(str(shopping_result['position']) + " - " + shopping_result['title'])
def test_search_by_location(self):
    for city in ["new york", "paris", "berlin"]:
        location = GoogleSearchResults({}).get_location(city, 1)[0]["canonical_name"]
        search = GoogleSearchResults({
            "q": "best coffee shop",  # search query
            "location": location,
            "num": 10,
            "start": 0
        })
        data = search.get_json()
        top_result = data['organic_results'][0]["title"]
        print("top coffee result for " + location + " is: " + top_result)
def test_search_google_news(self):
    search = GoogleSearchResults({
        "q": "coffee",     # search query
        "tbm": "nws",      # google news
        "tbs": "qdr:d",    # last 24h
        "num": 10
    })
    for offset in [0, 1, 2]:
        search.params_dict["start"] = offset * 10
        data = search.get_json()
        for news_result in data['news_results']:
            print(str(news_result['position'] + offset * 10) + " - " + news_result['title'])
def test_get_search_archive(self):
    search = GoogleSearchResults({"q": "Coffee", "location": "Austin,Texas"})
    search_result = search.get_dictionary()
    search_id = search_result.get("search_metadata").get("id")
    # retrieve the same search from the archive as JSON
    archived_search_result = GoogleSearchResults({}).get_search_archive(search_id, 'json')
    self.assertEqual(archived_search_result.get("search_metadata").get("id"), search_id)
    # retrieve the same search from the archive as raw HTML
    html_buffer = GoogleSearchResults({}).get_search_archive(search_id, 'html')
    self.assertGreater(len(html_buffer), 10000)
def test_async(self):
    # store searches
    search_queue = Queue()
    # Serp API search
    search = GoogleSearchResults({"location": "Austin,Texas", "async": True})
    # loop through companies
    for company in ['amd', 'nvidia', 'intel']:
        print("execute async search: q = " + company)
        search.params_dict["q"] = company
        data = search.get_dict()
        print("add search to the queue where id: " + data['search_metadata']['id'])
        # add search to the search_queue
        search_queue.put(data)

    print("wait until all search statuses are cached or success")
    # create a regular search to poll the archive
    search = GoogleSearchResults({"async": True})
    while not search_queue.empty():
        data = search_queue.get()
        search_id = data['search_metadata']['id']
        # retrieve search from the archive - blocking call
        print(search_id + ": get search from archive")
        search_archived = search.get_search_archive(search_id)
        print(search_id + ": status = " + search_archived['search_metadata']['status'])
        # check status
        if re.search('Cached|Success', search_archived['search_metadata']['status']):
            print(search_id + ": search done with q = " + search_archived['search_parameters']['q'])
        else:
            # not ready yet: requeue the search metadata and wait 1s
            print(search_id + ": requeue search")
            search_queue.put(data)
            time.sleep(1)

    # search is over.
    print('all searches completed')
def test_get_account(self):
    search = GoogleSearchResults({})
    account = search.get_account()
    self.assertIsNotNone(account.get("account_id"))
    self.assertEqual(account.get("api_key"), GoogleSearchResults.SERP_API_KEY)
# Out-of-box test: load the package and run a simple query
import sys
import os
import pprint

from serpapi import GoogleSearchResults

print("initialize serpapi search")
search = GoogleSearchResults({
    "q": "coffee",
    "location": "Austin,Texas",
    "api_key": os.getenv("API_KEY", "demo")
})
print("execute search")
result = search.get_dict()
print("display result")
pp = pprint.PrettyPrinter(indent=2)
pp.pprint(result)
print("------")
if len(result) > 0:
    print("OK: out-of-box testing is passing")
    sys.exit(0)
print("FAIL: out-of-box testing is failing: no result")
sys.exit(1)
def test_get_location(self):
    client = GoogleSearchResults({"q": None, "async": True})
    location_list = client.get_location("Austin", 3)
    self.assertIsNotNone(location_list[0].get("id"))
    pp = pprint.PrettyPrinter(indent=2)
    pp.pprint(location_list)