Skip to content

Commit 3d5a4c5

Browse files
author
Kenneth Reitz
committed
generators!
1 parent 34d1db4 commit 3d5a4c5

1 file changed

Lines changed: 60 additions & 11 deletions

File tree

github3/api.py

Lines changed: 60 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919

2020
from decorator import decorator
2121

22+
PAGING_SIZE = 100
2223

2324
class GithubCore(object):
2425

@@ -27,7 +28,7 @@ class GithubCore(object):
2728

2829
def __init__(self):
2930
self.session = requests.session()
30-
self.session.params = {'per_page': 100}
31+
self.session.params = {'per_page': PAGING_SIZE}
3132

3233

3334
@staticmethod
@@ -71,7 +72,6 @@ def _requests_post_hook(self, r):
7172
def _http_resource(self, verb, endpoint, params=None, **etc):
7273

7374
url = self._generate_url(endpoint)
74-
7575
args = (verb, url)
7676

7777
if params:
@@ -83,8 +83,6 @@ def _http_resource(self, verb, endpoint, params=None, **etc):
8383
r = self.session.request(*args, **kwargs)
8484
r = self._requests_post_hook(r)
8585

86-
# print self._ratelimit_remaining
87-
8886
r.raise_for_status()
8987

9088
return r
@@ -104,17 +102,68 @@ def _patch_resource(self, resource, data, **kwargs):
104102
return msg
105103

106104

107-
def _get_resources(self, resource, obj, **kwargs):
105+
@staticmethod
106+
def _total_pages_from_header(link_header):
107+
from urlparse import urlparse, parse_qs
108+
page_info = {}
108109

109-
r = self._http_resource('GET', resource, params=kwargs)
110-
d_items = self._resource_deserialize(r.content)
110+
for link in link_header.split(','):
111+
112+
uri, meta = map(str.strip, link.split(';'))
113+
114+
# Strip <>'s
115+
uri = uri[1:-1]
116+
117+
# Get query params from header.
118+
q = parse_qs(urlparse(uri).query)
119+
meta = meta[5:-1]
120+
121+
page_info[meta] = q
122+
123+
try:
124+
return int(page_info['last']['page'].pop())
125+
except KeyError:
126+
return True
127+
128+
def _get_resources(self, resource, obj, limit=None, **kwargs):
    """Generator yielding ``obj`` instances from a paginated collection.

    Fetches pages of at most ``PAGING_SIZE`` items via ``_http_resource``
    until either ``limit`` items have been yielded or the API reports no
    further pages (no ``rel="last"`` in the ``Link`` header).

    :param resource: endpoint path passed to ``_http_resource``.
    :param obj: model class; each deserialized dict is wrapped with
        ``obj.new_from_dict(item, gh=self)``.
    :param limit: maximum number of items to yield, or ``None`` for all.
    :raises ValueError: if ``limit`` is given and not positive.
    """
    if limit is not None and limit <= 0:
        # raise instead of assert so the check survives ``python -O``
        raise ValueError('limit must be a positive integer')

    moar = True
    # Will the request span more than one page?
    # NOTE: test ``limit is None`` FIRST -- comparing None with an int
    # raises TypeError on Python 3 (the original had the operands of the
    # ``or`` reversed).
    is_truncated = (limit is None) or (limit > PAGING_SIZE)
    r_count = 0     # items yielded so far
    page = 1        # next page to request

    while moar:

        if not is_truncated:
            # Everything fits into a single request.
            kwargs['per_page'] = limit
            moar = False
        else:
            kwargs['page'] = page
            if limit:
                if (limit - r_count) < PAGING_SIZE:
                    # Final partial page: only ask for what we still need.
                    kwargs['per_page'] = (limit - r_count)
                    moar = False

        r = self._http_resource('GET', resource, params=kwargs)
        max_page = self._total_pages_from_header(r.headers['link'])

        # True means no rel="last" link: this is the final page.
        if max_page is True:
            moar = False

        d_items = self._resource_deserialize(r.content)

        for item in d_items:
            # Same ordering fix as above: None guard before the comparison.
            if (limit is None) or (r_count < limit):
                r_count += 1
                yield obj.new_from_dict(item, gh=self)
            else:
                moar = False

        page += 1
118167

119168

120169
def _to_map(self, obj, iterable):

0 commit comments

Comments (0)