Improve pagination handling in proposals.py
Previously, if multiple pages were returned, the script would discard all but the first page of results. A check was also added to ensure the response is an array; this avoids somewhat cryptic errors when iterating over non-list responses (e.g. error messages or other malformed output).
This commit is contained in:
parent
44db84f3d8
commit
3a9cfd490f
1 changed file with 8 additions and 3 deletions
|
@ -12,13 +12,18 @@ authors = set()
|
|||
prs = set()
|
||||
|
||||
def getpage(url, page):
|
||||
resp = requests.get(url + str(page))
|
||||
url = url + str(page)
|
||||
resp = requests.get(url)
|
||||
|
||||
for link in resp.links.values():
|
||||
if link['rel'] == 'last':
|
||||
pagecount = re.search('page=(.+?)', link['url']).group(1)
|
||||
|
||||
return resp.json()
|
||||
val = resp.json()
|
||||
if not isinstance(val, list):
|
||||
print(val) # Just dump the raw (likely error) response to the log
|
||||
raise Exception("Error calling %s" % url)
|
||||
return val
|
||||
|
||||
def getbylabel(label):
|
||||
pagecount = 1
|
||||
|
@ -27,7 +32,7 @@ def getbylabel(label):
|
|||
print(urlbase)
|
||||
json.extend(getpage(urlbase, 1))
|
||||
for page in range(2, int(pagecount) + 1):
|
||||
getpage(urlbase, page)
|
||||
json.extend(getpage(urlbase, page))
|
||||
|
||||
return json
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue