@@ -352,9 +352,6 @@ We do offer two ways to boost your searches thanks to `async` parameter.
 - Non-blocking - async=true - it's the way to go for a large number of queries submitted in batch (recommended)
 
 ``` python
-# Python 3.6+ (tested)
-#
-
 # Operating system
 import os
 
@@ -372,47 +369,53 @@ from serpapi import GoogleSearch
 
 # store searches
 search_queue = Queue()
-
+
 # SerpApi search
 search = GoogleSearch({
     "location": "Austin,Texas",
-    "async": True
+    "async": True,
377+ " api_key" : os.getenv(" API_KEY" )
380378})
381379
 # loop through a list of companies
-for company in ['amd','nvidia','intel']:
-    print("execute async search: q = " + company)
-    search.params_dict["q"] = company
-    search = search.get_dict()
-    print("add search to the queue where id: " + search['search_metadata']['id'])
-    # add search to the search_queue
-    search_queue.put(search)
+for company in ['amd', 'nvidia', 'intel']:
+    print("execute async search: q = " + company)
+    search.params_dict["q"] = company
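+    # async=true: this call submits the search and returns right away with its metadata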
+    result = search.get_dict()
+    if "error" in result:
+        print("oops error: ", result["error"])
+        continue
+    print("add search to the queue where id: ", result['search_metadata']['id'])
+    # add search to the search_queue
+    search_queue.put(result)
 
 print("wait until all search statuses are cached or success")
 
-# Create regular search
-search = GoogleSearch({"async": True})
 while not search_queue.empty():
-    search = search_queue.get()
-    search_id = search['search_metadata']['id']
-
-    # retrieve search from the archive - blocker
-    print(search_id + ": get search from archive")
-    search_archived = search.get_search_archive(search_id)
-    print(search_id + ": status = " + search_archived['search_metadata']['status'])
-
-    # check status
-    if re.search('Cached|Success', search_archived['search_metadata']['status']):
-        print(search_id + ": search done with q = " + search_archived['search_parameters']['q'])
-    else:
-        # requeue search_queue
-        print(search_id + ": requeue search")
-        search_queue.put(search)
-
-        # wait 1s
-        time.sleep(1)
-
-        # self.assertIsNotNone(results["local_results"][0]["title"])
+    result = search_queue.get()
+    search_id = result['search_metadata']['id']
+
+    # retrieve search from the archive - blocker
+    print(search_id + ": get search from archive")
+    search_archived = search.get_search_archive(search_id)
+    print(search_id + ": status = " +
+          search_archived['search_metadata']['status'])
+
+    # check status
+    if re.search('Cached|Success',
+                 search_archived['search_metadata']['status']):
+        print(search_id + ": search done with q = " +
+              search_archived['search_parameters']['q'])
+    else:
+        # requeue search_queue
+        print(search_id + ": requeue search")
+        search_queue.put(result)
+
+        # wait 1s
+        time.sleep(1)
+
 print('all searches completed')
 ```
 