Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
feat: Introduce request timeout handling and allow passing requests-s…
…pecific arguments to API calls, along with updating documentation and removing the README template.
  • Loading branch information
jvmvik committed Feb 16, 2026
commit daef303e902b34fcb051022d9df6f0b6e6b84be5
8 changes: 6 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -52,12 +52,13 @@ The [SerpApi.com API Documentation](https://serpapi.com/search-api) contains a l

### Error handling

Unsuccessful requests raise `serpapi.HTTPError` exception. The returned status code will reflect the sort of error that occurred, please refer to [Status and Error Codes Documentation](https://serpapi.com/api-status-and-error-codes) for more details.
Unsuccessful requests raise `serpapi.HTTPError` or `serpapi.TimeoutError` exceptions. The returned status code will reflect the type of error that occurred; please refer to the [Status and Error Codes Documentation](https://serpapi.com/api-status-and-error-codes) for more details.

```python
import serpapi

client = serpapi.Client(api_key=os.getenv("API_KEY"))
# A default timeout can be set here.
client = serpapi.Client(api_key=os.getenv("API_KEY"), timeout=10)

try:
results = client.search({
Expand All @@ -71,6 +72,9 @@ except serpapi.HTTPError as e:
pass
elif e.status_code == 429: # Exceeds the hourly throughput limit OR account run out of searches
pass
except serpapi.TimeoutError as e:
# Handle timeout
print(f"The request timed out: {e}")
```

## Documentation
Expand Down
165 changes: 0 additions & 165 deletions README.md.erb

This file was deleted.

34 changes: 31 additions & 3 deletions serpapi/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,9 @@ class Client(HTTPClient):

DASHBOARD_URL = "https://serpapi.com/dashboard"

def __init__(self, *, api_key=None, timeout=None):
    """Construct a SerpApi client.

    :param api_key: credential used to authenticate API requests.
    :param timeout: default timeout (presumably seconds, as with
        ``requests``) applied to every request unless overridden per
        call — TODO confirm against HTTPClient.
    """
    super().__init__(api_key=api_key, timeout=timeout)

def __repr__(self):
return "<SerpApi Client>"

Expand Down Expand Up @@ -60,10 +63,16 @@ def search(self, params: dict = None, **kwargs):
if params is None:
params = {}

# These are arguments that should be passed to the underlying requests.request call.
request_kwargs = {}
for key in ["timeout", "proxies", "verify", "stream", "cert"]:
if key in kwargs:
request_kwargs[key] = kwargs.pop(key)
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Duplicated request kwargs extraction across four methods

Low Severity

The same request_kwargs extraction loop (iterating over ["timeout", "proxies", "verify", "stream", "cert"] and popping from kwargs) is copy-pasted identically in search, search_archive, locations, and account. Extracting this into a small helper method would reduce duplication and ensure the list of forwarded keys stays consistent if it ever changes.

Additional Locations (2)

Fix in Cursor Fix in Web


if kwargs:
params.update(kwargs)

r = self.request("GET", "/search", params=params)
r = self.request("GET", "/search", params=params, **request_kwargs)

return SerpResults.from_http_response(r, client=self)

Expand All @@ -80,6 +89,12 @@ def search_archive(self, params: dict = None, **kwargs):
if params is None:
params = {}

# These are arguments that should be passed to the underlying requests.request call.
request_kwargs = {}
for key in ["timeout", "proxies", "verify", "stream", "cert"]:
if key in kwargs:
request_kwargs[key] = kwargs.pop(key)

if kwargs:
params.update(kwargs)

Expand All @@ -90,7 +105,7 @@ def search_archive(self, params: dict = None, **kwargs):
f"Please provide 'search_id', found here: { self.DASHBOARD_URL }"
)

r = self.request("GET", f"/searches/{ search_id }", params=params)
r = self.request("GET", f"/searches/{ search_id }", params=params, **request_kwargs)
return SerpResults.from_http_response(r, client=self)

def locations(self, params: dict = None, **kwargs):
Expand All @@ -106,6 +121,12 @@ def locations(self, params: dict = None, **kwargs):
if params is None:
params = {}

# These are arguments that should be passed to the underlying requests.request call.
request_kwargs = {}
for key in ["timeout", "proxies", "verify", "stream", "cert"]:
if key in kwargs:
request_kwargs[key] = kwargs.pop(key)

if kwargs:
params.update(kwargs)

Expand All @@ -114,6 +135,7 @@ def locations(self, params: dict = None, **kwargs):
"/locations.json",
params=params,
assert_200=True,
**request_kwargs,
)
return r.json()

Expand All @@ -129,10 +151,16 @@ def account(self, params: dict = None, **kwargs):
if params is None:
params = {}

# These are arguments that should be passed to the underlying requests.request call.
request_kwargs = {}
for key in ["timeout", "proxies", "verify", "stream", "cert"]:
if key in kwargs:
request_kwargs[key] = kwargs.pop(key)

if kwargs:
params.update(kwargs)

r = self.request("GET", "/account.json", params=params, assert_200=True)
r = self.request("GET", "/account.json", params=params, assert_200=True, **request_kwargs)
return r.json()


Expand Down
6 changes: 6 additions & 0 deletions serpapi/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,3 +43,9 @@ class HTTPConnectionError(HTTPError, requests.exceptions.ConnectionError, SerpAp
"""Connection Error."""

pass


class TimeoutError(requests.exceptions.Timeout, SerpApiError):
    """Raised when an API request exceeds its allotted time.

    NOTE(review): this deliberately shadows the builtin ``TimeoutError``
    within the package namespace; callers are expected to catch it as
    ``serpapi.TimeoutError``.
    """
10 changes: 9 additions & 1 deletion serpapi/http.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from .exceptions import (
HTTPError,
HTTPConnectionError,
TimeoutError,
)
from .__version__ import __version__

Expand All @@ -13,10 +14,11 @@ class HTTPClient:
BASE_DOMAIN = "https://serpapi.com"
USER_AGENT = f"serpapi-python, v{__version__}"

def __init__(self, *, api_key=None):
def __init__(self, *, api_key=None, timeout=None):
    """Initialize the HTTP client and its underlying requests session.

    :param api_key: credential used to authenticate requests.
    :param timeout: default timeout forwarded to each request unless the
        caller supplies one per call.
    """
    # Reusable session so connections can be pooled across requests.
    self.session = requests.Session()
    # TODO: do we want to support the environment variable? Seems like a security risk.
    self.api_key = api_key
    self.timeout = timeout

def request(self, method, path, params, *, assert_200=True, **kwargs):
Expand All @@ -34,12 +36,18 @@ def request(self, method, path, params, *, assert_200=True, **kwargs):
try:
headers = {"User-Agent": self.USER_AGENT}

# Use the default timeout if one was provided to the client.
if self.timeout and "timeout" not in kwargs:
kwargs["timeout"] = self.timeout
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Truthiness check ignores falsy timeout values

Low Severity

The condition if self.timeout uses a truthiness check, which means a timeout value of 0 (or 0.0) would be treated as falsy and not applied. The check needs to be if self.timeout is not None to correctly distinguish between "no timeout set" and "timeout explicitly set to zero."

Fix in Cursor Fix in Web


r = self.session.request(
method=method, url=url, params=params, headers=headers, **kwargs
)

except requests.exceptions.ConnectionError as e:
raise HTTPConnectionError(e)
except requests.exceptions.Timeout as e:
raise TimeoutError(e)
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Timeout handler unreachable for connection timeouts

High Severity

The except requests.exceptions.ConnectionError clause is placed before except requests.exceptions.Timeout. Since requests.exceptions.ConnectTimeout is a subclass of both ConnectionError and Timeout, any connection timeout will be caught by the first handler and raised as HTTPConnectionError instead of TimeoutError. The Timeout except clause needs to come first so that timeouts are properly distinguished from other connection errors.

Fix in Cursor Fix in Web


# Raise an exception if the status code is not 200.
if assert_200:
Expand Down
39 changes: 39 additions & 0 deletions tests/test_timeout.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
import pytest
import requests
from serpapi import Client

def test_client_timeout_setting():
    """The timeout given at construction time is stored on the client."""
    configured = Client(api_key="test_key", timeout=10)
    assert configured.timeout == 10

def test_request_timeout_override(monkeypatch):
    """A per-call timeout takes precedence over the client default."""
    client = Client(api_key="test_key", timeout=10)

    def fake_request(method, url, params, headers, timeout, **kwargs):
        # The per-call value (5) must win over the client default (10).
        assert timeout == 5
        response = requests.Response()
        response.status_code = 200
        response._content = b'{"search_metadata": {"id": "123"}}'
        return response

    monkeypatch.setattr(client.session, "request", fake_request)
    client.search(q="coffee", timeout=5)

def test_request_default_timeout(monkeypatch):
    """With no per-call timeout, the client's default is forwarded."""
    client = Client(api_key="test_key", timeout=10)

    def fake_request(method, url, params, headers, timeout, **kwargs):
        # No override supplied, so the constructor default must arrive here.
        assert timeout == 10
        response = requests.Response()
        response.status_code = 200
        response._content = b'{"search_metadata": {"id": "123"}}'
        return response

    monkeypatch.setattr(client.session, "request", fake_request)
    client.search(q="coffee")