Skip to content

Commit 009b052

Browse files
committed
add more search engines
- youtube - walmart - apple_app_store - naver
1 parent 5061e5b commit 009b052

13 files changed

+272
-3
lines changed

README.md

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,11 +50,18 @@ Alternatively, you can search:
5050
- Bing using BingSearch class
5151
- Baidu using BaiduSearch class
5252
- Yahoo using YahooSearch class
53+
- duckduckgo using DuckDuckGoSearch class
5354
- Ebay using EbaySearch class
5455
- Yandex using YandexSearch class
5556
- HomeDepot using HomeDepotSearch class
5657
- GoogleScholar using GoogleScholarSearch class
5758

59+
- youtube using YoutubeSearch class
60+
- walmart using WalmartSearch class
61+
- apple_app_store using AppleAppStoreSearch class
62+
- naver using NaverSearch class
63+
64+
5865
See the [playground to generate your code.](https://serpapi.com/playground)
5966

6067
## Summary
@@ -546,34 +553,56 @@ In some cases, there are more details available in the data object.
546553
If it's client error, then a SerpApiClientException is raised.
547554

548555
## Change log
556+
2021-12-22 @ 2.5.1
557+
- add more search engines
558+
- youtube
559+
- walmart
560+
- apple_app_store
561+
- naver
562+
563+
2021-09-12 @ 2.5.0
564+
- Fix pagination to support all search engine
565+
- Add duckduckgo support
566+
- Move constant to one file
567+
549568
2021-09-01 @ 2.4.1
550569
- raise SerpApiClientException instead of raw string in order to follow Python guideline 3.5+
551570
- add more unit error tests for serp_api_client
571+
552572
2021-07-26 @ 2.4.0
553573
- add page size support using num parameter
554574
- add youtube search engine
575+
555576
2021-06-05 @ 2.3.0
556577
- add pagination support
578+
557579
2021-04-28 @ 2.2.0
558580
- add get_response method to provide raw requests.Response object
581+
559582
2021-04-04 @ 2.1.0
560583
- Add home depot search engine
561584
- get_object() returns dynamic Python object
585+
562586
2020-10-26 @ 2.0.0
563587
- Reduce class name to <engine>Search
564588
- Add get_raw_json
589+
565590
2020-06-30 @ 1.8.3
566591
- simplify import
567592
- improve package for python 3.5+
568593
- add support for python 3.5 and 3.6
594+
569595
2020-03-25 @ 1.8
570596
- add support for Yandex, Yahoo, Ebay
571597
- clean-up test
598+
572599
2019-11-10 @ 1.7.1
573600
- increase engine parameter priority over engine value set in the class
601+
574602
2019-09-12 @ 1.7
575603
- Change namespace "from lib." instead: "from serpapi import GoogleSearch"
576604
- Support for Bing and Baidu
605+
577606
2019-06-25 @ 1.6
578607
- New search engine supported: Baidu and Bing
579608

serpapi/__init__.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,3 +8,8 @@
88
from .ebay_search import EbaySearch
99
from .home_depot_search import HomeDepotSearch
1010
from .youtube_search import YoutubeSearch
11+
from .duck_duck_go_search import DuckDuckGoSearch
12+
from .walmart_search import WalmartSearch
13+
from .naver_search import NaverSearch
14+
from .apple_app_store_search import AppleAppStoreSearch
15+

serpapi/apple_app_store_search.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
from serpapi.serp_api_client import *
2+
from serpapi.serp_api_client_exception import SerpApiClientException
3+
from serpapi.constant import *
4+
5+
class AppleAppStoreSearch(SerpApiClient):
    """AppleAppStoreSearch enables searching the Apple App Store and parsing the result.
    ```python
    from serpapi import AppleAppStoreSearch
    search = AppleAppStoreSearch({"term": "chair"})
    data = search.get_json()
    ```

    doc: https://serpapi.com/apple-app-store
    """

    def __init__(self, params_dict):
        # Route every request to SerpApi's apple_app_store engine.
        super(AppleAppStoreSearch, self).__init__(params_dict, APPLE_APP_STORE_ENGINE)

    def get_location(self, q, limit = 5):
        # The Apple App Store engine has no notion of location.
        raise SerpApiClientException("location is not supported by apple app store search engine")

serpapi/constant.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
# Pagination constant
DEFAULT_START = 0
DEFAULT_END = 100
DEFAULT_PAGE_SIZE = 10

# Supported search engine
GOOGLE_ENGINE = 'google'
GOOGLE_SCHOLAR_ENGINE = 'google_scholar'
BAIDU_ENGINE = 'baidu'
BING_ENGINE = 'bing'
YANDEX_ENGINE = 'yandex'
EBAY_ENGINE = 'ebay'
YAHOO_ENGINE = 'yahoo'
HOME_DEPOT_ENGINE = 'home_depot'
YOUTUBE_ENGINE = 'youtube'
DUCKDUCKGO_ENGINE = 'duckduckgo'
WALMART_ENGINE = "walmart"
NAVER_ENGINE = "naver"
APPLE_APP_STORE_ENGINE = "apple_app_store"

# Usage: from serpapi.constant import *

serpapi/naver_search.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
from serpapi.serp_api_client import *
2+
from serpapi.serp_api_client_exception import SerpApiClientException
3+
from serpapi.constant import *
4+
5+
class NaverSearch(SerpApiClient):
    """NaverSearch enables searching Naver and parsing the result.
    ```python
    from serpapi import NaverSearch
    search = NaverSearch({"query": "chair"})
    data = search.get_json()
    ```

    doc: https://serpapi.com/naver-search-api
    """

    def __init__(self, params_dict):
        # Route every request to SerpApi's naver engine.
        super(NaverSearch, self).__init__(params_dict, NAVER_ENGINE)

    def get_location(self, q, limit = 5):
        # The Naver engine has no notion of location.
        raise SerpApiClientException("location is not supported by naver search engine")

serpapi/serp_api_client.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -55,6 +55,7 @@ def get_response(self, path = '/search'):
5555
url = None
5656
try:
5757
url, parameter = self.construct_url(path)
58+
print(url)
5859
response = requests.get(url, parameter, timeout=self.timeout)
5960
return response
6061
except requests.HTTPError as e:

serpapi/walmart_search.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
from serpapi.serp_api_client import *
2+
from serpapi.serp_api_client_exception import SerpApiClientException
3+
from serpapi.constant import *
4+
5+
class WalmartSearch(SerpApiClient):
    """WalmartSearch enables searching Walmart and parsing the result.
    ```python
    from serpapi import WalmartSearch
    search = WalmartSearch({"query": "chair"})
    data = search.get_json()
    ```

    doc: https://serpapi.com/walmart-search-api
    """

    def __init__(self, params_dict):
        # Route every request to SerpApi's walmart engine.
        super(WalmartSearch, self).__init__(params_dict, WALMART_ENGINE)

    def get_location(self, q, limit = 5):
        # The Walmart engine has no notion of location.
        raise SerpApiClientException("location is not supported by walmart search engine")
Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
import random
2+
import unittest
3+
import os
4+
import pprint
5+
from serpapi import AppleAppStoreSearch
6+
7+
class TestAppleAppStoreSearch(unittest.TestCase):
    """Integration test for AppleAppStoreSearch (hits the live API; needs API_KEY)."""

    def setUp(self):
        # Fall back to the public "demo" key when no API_KEY is provided.
        AppleAppStoreSearch.SERP_API_KEY = os.getenv("API_KEY", "demo")

    @unittest.skipIf((os.getenv("API_KEY") == None), "no api_key provided")
    def test_get_json(self):
        # The Apple App Store engine takes its query via the "term" parameter.
        search = AppleAppStoreSearch({"term": "Coffee"})
        data = search.get_json()
        self.assertEqual(data["search_metadata"]["status"], "Success")
        self.assertIsNotNone(data["search_metadata"]["id"])
        if "organic_results" in data:
            self.assertIsNotNone(data["organic_results"][1]["title"])
        # sanity check: response carries more than bare metadata
        self.assertTrue(len(data.keys()) > 3)

if __name__ == '__main__':
    unittest.main()

tests/test_duck_duck_go_search.py

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
import random
2+
import unittest
3+
import os
4+
import pprint
5+
from serpapi import DuckDuckGoSearch
6+
7+
class TestDuckDuckGoSearch(unittest.TestCase):
    """Integration test for DuckDuckGoSearch (hits the live API; needs API_KEY)."""

    def setUp(self):
        # Fall back to the public "demo" key when no API_KEY is provided.
        DuckDuckGoSearch.SERP_API_KEY = os.getenv("API_KEY", "demo")

    @unittest.skipIf((os.getenv("API_KEY") == None), "no api_key provided")
    def test_get_json(self):
        client = DuckDuckGoSearch({"q": "Coffee"})
        payload = client.get_json()
        self.assertIsNone(payload.get("error"))
        metadata = payload["search_metadata"]
        self.assertEqual(metadata["status"], "Success")
        self.assertIsNotNone(metadata["duckduckgo_url"])
        self.assertIsNotNone(metadata["id"])
        if "organic_results" in payload:
            self.assertIsNotNone(payload["organic_results"][1]["title"])
        # sanity check: response carries more than bare metadata
        self.assertTrue(len(payload.keys()) > 3)

if __name__ == '__main__':
    unittest.main()

tests/test_ebay_search.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,30 @@ def test_get_json(self):
1818
self.assertIsNotNone(data["search_metadata"]["ebay_url"])
1919
self.assertIsNotNone(data["search_metadata"]["id"])
2020
self.assertIsNotNone(data["organic_results"][0]["title"])
21+
        # pp = pprint.PrettyPrinter(indent=2)
        # pp.pprint(data)

    # TODO fix universal pagination
    # @unittest.skipIf((os.getenv("API_KEY") == None), "no api_key provided")
    # def test_paginate(self):
    #     params = {
    #         "_nkw": "coffee",
    #         "api_key": os.getenv("API_KEY")
    #     }
    #     search = EbaySearch(params)
    #     pages = search.pagination(20, 60)
    #     page_count = 0
    #     result_count = 0
    #     for page in pages:
    #         page_count += 1
    #         result_count += len(page["organic_results"])
    #     self.assertEqual(page_count, 2)
    #     self.assertEqual(result_count, 40)
2345

2446
if __name__ == '__main__':
2547
unittest.main()

0 commit comments

Comments
 (0)