@@ -7,6 +7,7 @@ from ...client import AuthenticatedClient, Client
77from ...types import Response, UNSET
88from ... import errors
99
10+
1011{% for relative in endpoint.relative_imports | sort %}
1112{{ relative }}
1213{% endfor %}
@@ -22,6 +23,10 @@ from ... import errors
2223{% set parsed_responses = (endpoint.responses | length > 0) and return_string != "Any" %}
2324{% endif %}
2425
26+ {% if endpoint.name.endswith("_list") and parsed_responses and return_string.startswith("list[") %}
27+ from ...utils import parse_link_header
28+ {% endif %}
29+
2530def _get_kwargs(
2631 {{ arguments(endpoint, include_client=False) | indent(4) }}
2732) -> dict[str, Any]:
@@ -165,3 +170,162 @@ async def asyncio(
165170 {{ kwargs(endpoint) }}
166171 )).parsed
167172{% endif %}
173+
{# Only generated for list endpoints that parse into a list[...] result. #}
{% if endpoint.name.endswith("_list") and parsed_responses and return_string.startswith("list[") %}
def sync_all(
    {{ arguments(endpoint, skip_pagination=True) | indent(4) }}
) -> {{ return_string }}:
    """Get All Pages

    Fetch all pages of paginated results. This function automatically handles pagination
    by following the 'next' link in the Link header until all results are retrieved.

    Note: page_size will be set to 100 (the maximum allowed) automatically.

    Args:
    {% set all_parameters = endpoint.list_all_parameters() %}
    {% if all_parameters %}
    {% for parameter in all_parameters %}
    {% if parameter.name not in ["page", "page_size"] %}
        {{ parameter.to_docstring() | wordwrap(90) | indent(8) }}
    {% endif %}
    {% endfor %}
    {% endif %}

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        {{ return_string }}: Combined results from all pages
    """
    # Only the names actually used below; urlencode/urlunparse were dead imports.
    from urllib.parse import parse_qs, urlparse

    # This block is only rendered when return_string starts with "list[", so a
    # bare annotated empty list is always valid generated code here.
    all_results: {{ return_string }} = []

    # Build the request kwargs once; only the "page" param changes between pages.
    kwargs = _get_kwargs(
        {{ kwargs(endpoint, include_client=False, skip_pagination=True) }}
    )

    # Request the maximum page size to minimize round trips.
    kwargs.setdefault("params", {})["page_size"] = 100

    # Initial request.
    response = client.get_httpx_client().request(**kwargs)
    parsed_response = _parse_response(client=client, response=response)

    if parsed_response:
        all_results.extend(parsed_response)

    # Follow the Link header's "next" relation until the server stops sending one.
    while True:
        links = parse_link_header(response.headers.get("Link", ""))

        if "next" not in links:
            break

        # Extract only the page number from the next URL; all other params are
        # kept from the original kwargs.
        next_params = parse_qs(urlparse(links["next"]).query)

        if "page" not in next_params:
            break

        kwargs["params"]["page"] = next_params["page"][0]

        # Fetch the next page.
        response = client.get_httpx_client().request(**kwargs)
        parsed_response = _parse_response(client=client, response=response)

        if parsed_response:
            all_results.extend(parsed_response)

    return all_results
251+
252+
async def asyncio_all(
    {{ arguments(endpoint, skip_pagination=True) | indent(4) }}
) -> {{ return_string }}:
    """Get All Pages (Async)

    Fetch all pages of paginated results asynchronously. This function automatically
    handles pagination by following the 'next' link in the Link header until all
    results are retrieved.

    Note: page_size will be set to 100 (the maximum allowed) automatically.

    Args:
    {% set all_parameters = endpoint.list_all_parameters() %}
    {% if all_parameters %}
    {% for parameter in all_parameters %}
    {% if parameter.name not in ["page", "page_size"] %}
        {{ parameter.to_docstring() | wordwrap(90) | indent(8) }}
    {% endif %}
    {% endfor %}
    {% endif %}

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        {{ return_string }}: Combined results from all pages
    """
    # Only the names actually used below; urlencode/urlunparse were dead imports.
    from urllib.parse import parse_qs, urlparse

    # This block is only rendered when return_string starts with "list[", so a
    # bare annotated empty list is always valid generated code here.
    all_results: {{ return_string }} = []

    # Build the request kwargs once; only the "page" param changes between pages.
    kwargs = _get_kwargs(
        {{ kwargs(endpoint, include_client=False, skip_pagination=True) }}
    )

    # Request the maximum page size to minimize round trips.
    kwargs.setdefault("params", {})["page_size"] = 100

    # Initial request.
    response = await client.get_async_httpx_client().request(**kwargs)
    parsed_response = _parse_response(client=client, response=response)

    if parsed_response:
        all_results.extend(parsed_response)

    # Follow the Link header's "next" relation until the server stops sending one.
    while True:
        links = parse_link_header(response.headers.get("Link", ""))

        if "next" not in links:
            break

        # Extract only the page number from the next URL; all other params are
        # kept from the original kwargs.
        next_params = parse_qs(urlparse(links["next"]).query)

        if "page" not in next_params:
            break

        kwargs["params"]["page"] = next_params["page"][0]

        # Fetch the next page.
        response = await client.get_async_httpx_client().request(**kwargs)
        parsed_response = _parse_response(client=client, response=response)

        if parsed_response:
            all_results.extend(parsed_response)

    return all_results
{% endif %}
330+
331+
0 commit comments