def start(self):
    # Raise an error if the backend is not properly configured
    if self.backend not in ('requests', 'tornado'):
        raise TransportException('Invalid HTTP backend: %s' % self.backend)
    if self.backend == 'requests' and not HAS_REQUESTS:
        raise TransportException('Trying to use Requests as the HTTP backend, but it is not installed')
    if self.backend == 'tornado' and not HAS_TORNADO:
        raise TransportException('Trying to use Tornado as the HTTP backend, but it is not installed')
    # Prepare the tornado backend
    if self.backend == 'tornado':
        self.tornado_client = tornado.httpclient.AsyncHTTPClient(max_clients=self.max_clients)
    elif self.backend == 'requests':
        # When using requests, we start a thread pool
        # whose size is given by max_clients.
        # (Tornado already has this feature built-in.)
        # We could simply start a new thread per request,
        # but that leads to an unbounded number of threads being
        # started within the Publisher processes. Python is poor at
        # reclaiming memory in long-running processes, which can
        # lead to memory leaks.
        # Instead, we queue the messages to be published and let the
        # worker threads below consume them (see the sketch after this method).
        self._publish_queue = queue.Queue()  # TODO: use opts hwm
        self._pool = []
        for index in range(self.max_clients):
            thread = threading.Thread(target=self._publish_requests)
            thread.daemon = True
            thread.start()
            self._pool.append(thread)
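
# --- Illustrative sketch, not part of the original module ---
# The worker loop below shows how the queued messages could be consumed by the
# thread pool started in start(). It is a minimal sketch under assumptions: the
# real _publish_requests body, the None shutdown sentinel, and the self.address
# and self.method attributes are hypothetical, and 'log' is assumed to be a
# module-level logger (logging.getLogger(__name__)).
def _publish_requests(self):
    while True:
        obj = self._publish_queue.get()  # blocks until a message is available
        if obj is None:
            # Hypothetical shutdown sentinel: stop() could enqueue one None per
            # worker thread so the pool drains the queue and exits cleanly.
            break
        try:
            # Assumed behaviour: each message is sent to a pre-configured
            # HTTP endpoint using the requests library.
            requests.request(self.method, self.address, data=obj)
        except requests.exceptions.RequestException:
            log.error('Unable to publish to %s', self.address, exc_info=True)
        finally:
            self._publish_queue.task_done()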