def client_proc(computation, njobs, task=None):
# schedule computation with the scheduler; scheduler accepts one computation
# at a time, so if scheduler is shared, the computation is queued until it
# is done with already scheduled computations
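    # ('schedule' yields 0 on success, so any other (truthy) value here
    # means the computation could not be scheduled)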
if (yield computation.schedule()):
raise Exception('Could not schedule computation')
# run jobs
for i in range(njobs):
        # the computation is supposed to be CPU bound, so 'run' is used so that
        # at most one computation runs at a server at any time; for mostly idle
        # computations, use 'run_async' to run more than one computation at a
        # server at the same time.
rtask = yield computation.run(compute, random.uniform(5, 10))
if isinstance(rtask, pycos.Task):
print(' job %s processed by %s' % (i, rtask.location))
else:
print('rtask %s failed: %s' % (i, rtask))
# wait for all jobs to be done and close computation
yield computation.close()
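

# A minimal sketch (an assumption, modeled on the standard dispycos client
# examples) of the scaffolding 'client_proc' needs to run: the remote
# 'compute' generator and the scheduler / computation setup below are not
# part of the snippet above.

import random
import pycos
from pycos.dispycos import Computation, Scheduler

# this generator is sent to remote dispycos servers and executed there;
# it simulates a CPU bound job by sleeping for 'n' seconds
def compute(n, task=None):
    yield task.sleep(n)

if __name__ == '__main__':
    # start a private scheduler; omit this if a shared dispycos scheduler is
    # already running on the network
    Scheduler()
    # package 'compute' so it can be distributed to dispycos servers
    computation = Computation([compute])
    # run the client task with 10 jobs
    pycos.Task(client_proc, computation, 10)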