Update readme docs #36

Open
wants to merge 5 commits into base: master
Changes from 2 commits
685 changes: 307 additions & 378 deletions README.md

Large diffs are not rendered by default.

37 changes: 37 additions & 0 deletions examples/pagination/appstore-reviews-pagination-example.py
@@ -0,0 +1,37 @@
# docs: https://serpapi.com/apple-app-store

from serpapi import AppleAppStoreSearch
from urllib.parse import (parse_qsl, urlsplit)

params = {
    "api_key": "...",          # your serpapi api key
    "engine": "apple_reviews", # search engine
    "product_id": "479516143"  # ID of the app to fetch reviews for
}

search = AppleAppStoreSearch(params)  # where data extraction happens

# show the page number
page_num = 0

# iterate over all pages
while True:
    results = search.get_dict()  # JSON -> Python dict

    if "error" in results:
        print(results["error"])
        break

    page_num += 1
    print(f"Current page: {page_num}")

    # iterate over reviews and extract the data
    for result in results.get("reviews", []):
        print(result["position"], result["title"], result["review_date"], sep="\n")

    # check if the next page key is present in the JSON
    # if present -> split URL in parts and update to the next page
    if "next" in results.get("serpapi_pagination", {}):
        search.params_dict.update(dict(parse_qsl(urlsplit(results.get("serpapi_pagination", {}).get("next")).query)))
    else:
        break
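
For readers new to the URL-splitting trick used above, here is a minimal standalone sketch (not part of the diff) of what that step does. The "next" URL below is made up purely for illustration; only urllib.parse from the standard library is involved:

from urllib.parse import parse_qsl, urlsplit

# hypothetical "next" page URL, for illustration only
next_url = "https://serpapi.com/search.json?engine=apple_reviews&product_id=479516143&page=2"

# split the URL, keep only its query string, and turn it into a dict of parameters
next_page_params = dict(parse_qsl(urlsplit(next_url).query))
print(next_page_params)
# {'engine': 'apple_reviews', 'product_id': '479516143', 'page': '2'}

# this dict is what search.params_dict.update(...) receives in the example above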
40 changes: 40 additions & 0 deletions examples/pagination/appstore-search-pagination-example.py
@@ -0,0 +1,40 @@
# docs: https://serpapi.com/apple-app-store

from serpapi import AppleAppStoreSearch
from urllib.parse import (parse_qsl, urlsplit)

params = {
    "api_key": "...",            # your serpapi api key
    "engine": "apple_app_store", # search engine
    "term": "minecraft",         # search query
    "country": "us",             # country to search from
    "lang": "en-us",             # language
    "num": "200"                 # number of results per page
}

search = AppleAppStoreSearch(params)  # where data extraction happens

# to show the page number
page_num = 0

# iterate over all pages
while True:
    results = search.get_dict()  # JSON -> Python dict

    if "error" in results:
        print(results["error"])
        break

    page_num += 1
    print(f"Current page: {page_num}")

    # iterate over organic results and extract the data
    for result in results.get("organic_results", []):
        print(result["position"], result["title"], sep="\n")

    # check if the next page key is present in the JSON
    # if present -> split URL in parts and update to the next page
    if "next" in results.get("serpapi_pagination", {}):
        search.params_dict.update(dict(parse_qsl(urlsplit(results.get("serpapi_pagination", {}).get("next")).query)))
    else:
        break
38 changes: 38 additions & 0 deletions examples/pagination/baidu-search-pagination-example.py
@@ -0,0 +1,38 @@
# docs: https://serpapi.com/baidu-search-api

from serpapi import BaiduSearch
from urllib.parse import (parse_qsl, urlsplit)

params = {
    "api_key": "...",          # your serpapi api key
    "engine": "baidu",         # search engine
    "q": "minecraft redstone"  # search query
    # other parameters
}

search = BaiduSearch(params)  # where data extraction happens

# to show the page number
page_num = 0

# iterate over all pages
while True:
    results = search.get_dict()  # JSON -> Python dict

    if "error" in results:
        print(results["error"])
        break

    page_num += 1
    print(f"Current page: {page_num}")

    # iterate over organic results and extract the data
    for result in results["organic_results"]:
        print(result["position"], result["title"], sep="\n")

    # check if the next page key is present in the JSON
    # if present -> split URL in parts and update to the next page
    if "next" in results.get("serpapi_pagination", {}):
        search.params_dict.update(dict(parse_qsl(urlsplit(results.get("serpapi_pagination", {}).get("next")).query)))
    else:
        break
33 changes: 33 additions & 0 deletions examples/pagination/bing-search-pagination-example.py
@@ -0,0 +1,33 @@
# docs: https://serpapi.com/bing-search-api

from serpapi import BingSearch
from urllib.parse import (parse_qsl, urlsplit)

params = {
    "api_key": "...",    # your serpapi api key
    "engine": "bing",    # parsing engine
    "q": "brabus",       # search query
    "device": "desktop", # device used for search
    "mkt": "en-us",      # language of the search
    "count": "50"        # number of results per page. 50 is the maximum
}

search = BingSearch(params)  # where data extraction happens

while True:
    results = search.get_dict()  # JSON -> Python dict

    if "error" in results:
        print(results["error"])
        break

    # iterate over organic results and extract the data
    for result in results["organic_results"]:
        print(result["position"], result["title"], sep="\n")

    # check if the next page key is present in the JSON
    # if present -> split URL in parts and update to the next page
    if "next" in results.get("serpapi_pagination", {}):
        search.params_dict.update(dict(parse_qsl(urlsplit(results.get("serpapi_pagination", {}).get("next")).query)))
    else:
        break
29 changes: 29 additions & 0 deletions examples/pagination/duckduck-go-pagination-example.py
@@ -0,0 +1,29 @@
# docs: https://serpapi.com/duckduckgo-search-api

from serpapi import GoogleSearch # will be changed to DuckDuckGoSearch later

params = {
    "api_key": "...",          # your serpapi api key
    "engine": "duckduckgo",    # search engine
    "q": "minecraft redstone", # search query
    "kl": "us-en"              # language
}

search = GoogleSearch(params)  # where data extraction happens
pages = search.pagination()    # paginating over all pages

# to show the page number
page_num = 0

for page in pages:

    # checks for "DuckDuckGo hasn't returned anything for this query..."
    if "error" in page:
        print(page["error"])
        break

    page_num += 1
    print(f"Page number: {page_num}")

    for result in page["organic_results"]:
        print(result["title"])
48 changes: 48 additions & 0 deletions examples/pagination/ebay-search-pagination-example.py
@@ -0,0 +1,48 @@
# Docs: https://serpapi.com/ebay-search-api

from serpapi import EbaySearch
from urllib.parse import (parse_qsl, urlsplit)

params = {
    "api_key": "...",             # serpapi api key
    "engine": "ebay",             # search engine
    "ebay_domain": "ebay.com",    # ebay domain
    "_nkw": "minecraft redstone", # search query
    # other params
}

search = EbaySearch(params)  # where data extraction happens

page_num = 0

while True:
    results = search.get_dict()  # JSON -> Python dict

    if "error" in results:
        print(results["error"])
        break

    for organic_result in results.get("organic_results", []):
        title = organic_result.get("title")
        print(title)

    page_num += 1
    print(page_num)

    # check the next page link first so the last page doesn't raise a KeyError
    if "next" in results.get("serpapi_pagination", {}):

        # next page data (_pgn), e.g.:
        # {'_nkw': 'minecraft redstone', '_pgn': '19', 'engine': 'ebay'}
        next_page_query_dict = dict(parse_qsl(urlsplit(results["serpapi_pagination"]["next"]).query))
        current_page = results["serpapi_pagination"]["current"]  # 1, 2, 3...

        # if current_page = 20 and next_page_query_dict["_pgn"] = 20: break
        if int(current_page) == int(next_page_query_dict["_pgn"]):
            break

        # update next page data
        search.params_dict.update(next_page_query_dict)
    else:
        break
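
To make the stopping condition above concrete, here is a tiny standalone sketch with made-up values. It shows why comparing the current page against the _pgn carried by the "next" URL ends the loop (the assumption, suggested by the original comments, is that eBay can keep returning a "next" link that points back at the page just fetched):

from urllib.parse import parse_qsl, urlsplit

# hypothetical last-page values, for illustration only
current_page = 19
next_url = "https://serpapi.com/search.json?engine=ebay&_nkw=minecraft+redstone&_pgn=19"

next_page_query_dict = dict(parse_qsl(urlsplit(next_url).query))
print(next_page_query_dict)  # {'engine': 'ebay', '_nkw': 'minecraft redstone', '_pgn': '19'}

# the "next" page number equals the page we are already on, so the loop stops
if int(current_page) == int(next_page_query_dict["_pgn"]):
    print("last page reached")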

40 changes: 40 additions & 0 deletions examples/pagination/google-images-pagination-example.py
@@ -0,0 +1,40 @@
# Docs: https://serpapi.com/images-results

from serpapi import GoogleSearch
from urllib.parse import (parse_qsl, urlsplit)

params = {
    "api_key": "...",          # your serpapi api key
    "engine": "google",        # search engine
    "q": "minecraft redstone", # search query
    "gl": "us",                # country of the search
    "hl": "en",                # language
    "ijn": 0,                  # page number: 0 -> first page, 1 -> second...
    "tbm": "isch"              # image results
}

search = GoogleSearch(params)  # where data extraction happens

# to show the page number
page_num = 0

image_results = []

while True:
    results = search.get_dict()  # JSON -> Python dictionary

    page_num += 1
    print(f"Current page: {page_num}")

    # checks for "Google hasn't returned any results for this query."
    if "error" not in results:
        for image in results.get("images_results", []):
            if image["original"] not in image_results:
                print(image["original"])
                image_results.append(image["original"])

        # update to the next page
        params["ijn"] += 1
    else:
        print(results["error"])
        break
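
One small design note on the loop above: the `not in` check against the image_results list rescans the whole list for every image, which gets slower as results accumulate. A minimal standalone sketch (made-up URLs, for illustration only) of the same de-duplication done with a set:

# made-up URLs, for illustration only
image_urls = [
    "https://example.com/a.png",
    "https://example.com/b.png",
    "https://example.com/a.png",  # duplicate
]

seen = set()

for url in image_urls:
    if url not in seen:  # O(1) membership test instead of scanning a list
        print(url)
        seen.add(url)
# prints each unique URL once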
42 changes: 42 additions & 0 deletions examples/pagination/google-jobs-pagination-example.py
@@ -0,0 +1,42 @@
# docs: https://serpapi.com/google-jobs-api

from serpapi import GoogleSearch
import json

params = {
    "api_key": "...",              # serpapi api key
    "engine": "google_jobs",       # parsing engine
    "google_domain": "google.com", # google domain for the search
    "q": "Barista",                # search query
    "start": 0                     # page number
}

search = GoogleSearch(params)  # where data extraction happens on the backend

jobs_data = []

# to show page number
page_num = 0

while True:
    results = search.get_dict()  # JSON -> Python dict

    # checks for "Google hasn't returned any results for this query."
    if "error" in results:
        print(results["error"])
        break

    page_num += 1
    print(f"Current page: {page_num}")

    # iterate over job results and extract the data
    for result in results["jobs_results"]:
        jobs_data.append({
            "title": result["title"],
            "company_name": result["company_name"],
            "location": result["location"]
        })

    # move to the next page of results
    params["start"] += 10

print(json.dumps(jobs_data, indent=2))
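
A possible follow-up once the loop has finished: writing the collected results to disk instead of printing them. This is only a sketch; the sample record and the output filename are made up for illustration, and in the real script jobs_data would be the list built by the loop above:

import json

# hypothetical stand-in for the list built by the loop above
jobs_data = [{"title": "Barista", "company_name": "Example Cafe", "location": "Austin, TX"}]

with open("google-jobs-results.json", "w", encoding="utf-8") as f:
    json.dump(jobs_data, f, indent=2, ensure_ascii=False)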
38 changes: 38 additions & 0 deletions examples/pagination/google-local-results-pagination-example.py
@@ -0,0 +1,38 @@
# docs: https://serpapi.com/local-results

from serpapi import GoogleSearch
from urllib.parse import (parse_qsl, urlsplit)

params = {
    "api_key": "...",   # serpapi api key
    "engine": "google", # search engine
    "q": "minecraft",   # search query
    "gl": "us",         # country of the search
    "hl": "en",         # language
    "tbm": "lcl"        # local results
}

search = GoogleSearch(params)  # where data extraction happens

# to show the page number
page_num = 0

# iterate over all pages
while True:
    results = search.get_dict()  # JSON -> Python dict

    if "error" in results:
        print(results["error"])
        break

    page_num += 1
    print(f"Current page: {page_num}")

    # iterate over local results and extract the data
    for result in results.get("local_results", []):
        print(result.get("position"), result.get("title"), result.get("address"), sep="\n")

    if results.get("serpapi_pagination", {}).get("next"):
        search.params_dict.update(dict(parse_qsl(urlsplit(results.get("serpapi_pagination").get("next")).query)))
    else:
        break
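
A general note that applies to every example in this PR: each script expects the SerpApi Python client (distributed on PyPI as google-search-results, as far as I know) to be installed, and a real API key substituted for the "..." placeholder. Below is a hedged sketch of reading the key from an environment variable instead of hard-coding it; the variable name SERPAPI_API_KEY is just an assumption:

import os
from serpapi import GoogleSearch

params = {
    "api_key": os.getenv("SERPAPI_API_KEY"), # hypothetical environment variable name
    "engine": "google",                      # search engine
    "q": "minecraft redstone"                # search query
}

search = GoogleSearch(params)  # where data extraction happens
results = search.get_dict()    # JSON -> Python dict

if "error" in results:
    print(results["error"])
else:
    print(f"returned {len(results.get('organic_results', []))} organic results")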