import logging
import multiprocessing
from multiprocessing.pool import ThreadPool

# NOTE: 'upload' (referenced below) is Rietveld's upload.py, which ships with
# depot_tools and is imported at module scope in git_cl.py, where this
# function lives.


def get_cl_statuses(changes, fine_grained, max_processes=None):
"""Returns a blocking iterable of (cl, status) for given branches.
If fine_grained is true, this will fetch CL statuses from the server.
Otherwise, simply indicate if there's a matching url for the given branches.
If max_processes is specified, it is used as the maximum number of processes
to spawn to fetch CL status from the server. Otherwise 1 process per branch is
spawned.
See GetStatus() for a list of possible statuses.
"""
  # Silence upload.py otherwise it becomes unwieldy.
  upload.verbosity = 0

  if fine_grained:
    # Process one branch synchronously to work through authentication, then
    # spawn processes to process all the other branches in parallel.
    if changes:
      def fetch(cl):
        try:
          return (cl, cl.GetStatus())
        except:
          # See http://crbug.com/629863.
          logging.exception('failed to fetch status for %s:', cl)
          raise
      yield fetch(changes[0])

      changes_to_fetch = changes[1:]
      if not changes_to_fetch:
        # Exit early if there was only one branch to fetch.
        return

      pool = ThreadPool(
          min(max_processes, len(changes_to_fetch))
          if max_processes is not None
          else max(len(changes_to_fetch), 1))
      fetched_cls = set()
      it = pool.imap_unordered(fetch, changes_to_fetch).__iter__()
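      # Pull results as they finish. If no result arrives within 5 seconds,
      # stop waiting; anything still unfetched is reported as 'error' below.
      # (If the pool iterator is instead exhausted cleanly, the StopIteration
      # it raises simply ends this generator under Python 2 semantics.)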
      while True:
        try:
          row = it.next(timeout=5)
        except multiprocessing.TimeoutError:
          break
        fetched_cls.add(row[0])
        yield row

      # Add any branches that failed to fetch.
      for cl in set(changes_to_fetch) - fetched_cls:
        yield (cl, 'error')
  else:
    # Do not use GetApprovingReviewers(), since it requires an HTTP request.
    for cl in changes:
      yield (cl, 'waiting' if cl.GetIssueURL() else 'error')
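

# Usage sketch (illustrative only, not part of git_cl.py): get_cl_statuses()
# is a generator, so statuses can be printed as they arrive. Real callers pass
# git_cl.Changelist objects, which provide the GetStatus() and GetIssueURL()
# methods used above; the _FakeCL class and the stand-in 'upload' object below
# are hypothetical stubs so the coarse (fine_grained=False) path can be run
# outside a depot_tools checkout.
if __name__ == '__main__':
  class _FakeUpload(object):
    # Minimal stand-in for the Rietveld upload module referenced above; only
    # the 'verbosity' attribute touched by get_cl_statuses() is needed.
    verbosity = 1

  upload = _FakeUpload()

  class _FakeCL(object):
    def __init__(self, name, issue_url):
      self._name = name
      self._issue_url = issue_url

    def GetStatus(self):
      return 'open'  # A real Changelist queries the code review server.

    def GetIssueURL(self):
      return self._issue_url

    def __str__(self):
      return self._name

  fake_changes = [
      _FakeCL('feature-a', 'https://codereview.example.com/123'),
      _FakeCL('feature-b', None),
  ]
  # Coarse mode: no network access, just report whether an issue is attached.
  for cl, status in get_cl_statuses(fake_changes, fine_grained=False):
    print('%s: %s' % (cl, status))  # feature-a: waiting / feature-b: error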