def get(self, url, query, cache):
    """Fetch every page of a JSON collection from *url*, following
    RFC 5988 ``Link: rel="next"`` pagination headers, with an optional
    on-disk cache.

    Args:
        url: Endpoint to GET; its sha1 also names the cache file under
            ``self.tmpdir``.
        query: Dict of query parameters for the first request.  It is
            never mutated — a copy is taken before merging pagination
            parameters.
        cache: When true, a cache file younger than 24 hours is reused,
            and fresh results are written back to the cache.

    Returns:
        A list concatenating the JSON payloads of all pages.
    """
    payloads_file = os.path.join(
        self.tmpdir,
        hashlib.sha1(url.encode('utf-8')).hexdigest() + ".json")
    # Cache hit requires: caching enabled, file exists (F_OK), and the
    # file is at most 24 hours old.
    if (not cache or not os.access(payloads_file, 0) or
            time.time() - os.stat(payloads_file).st_mtime > 24 * 60 * 60):
        payloads = []
        next_query = query
        while next_query:
            log.debug(str(next_query))
            result = requests.get(url, params=next_query)
            payloads += result.json()
            next_query = None
            for link in result.headers.get('Link', '').split(','):
                if 'rel="next"' in link:
                    m = re.search('<(.*)>', link)
                    if m:
                        parsed_url = parse.urlparse(m.group(1))
                        # Re-apply the original query in case the server
                        # dropped it from the next link (gitlab does),
                        # then overlay the next-page parameters.
                        # BUG FIX: copy instead of aliasing — the old
                        # code mutated the caller's dict in place and
                        # accumulated params across pages.
                        next_query = dict(query)
                        next_query.update(
                            dict(parse.parse_qsl(parsed_url.query))
                        )
        if cache:
            with open(payloads_file, 'w') as f:
                json.dump(payloads, f)
    else:
        with open(payloads_file, 'r') as f:
            payloads = json.load(f)
    return payloads