import pycurl


def info(c):
    """Return a dictionary with all info on the last response."""
    # c is a pycurl.Curl handle on which perform() has already been called.
    m = {}
    m['effective-url'] = c.getinfo(pycurl.EFFECTIVE_URL)
    m['http-code'] = c.getinfo(pycurl.HTTP_CODE)
    m['total-time'] = c.getinfo(pycurl.TOTAL_TIME)
    m['namelookup-time'] = c.getinfo(pycurl.NAMELOOKUP_TIME)
    m['connect-time'] = c.getinfo(pycurl.CONNECT_TIME)
    m['pretransfer-time'] = c.getinfo(pycurl.PRETRANSFER_TIME)
    m['redirect-time'] = c.getinfo(pycurl.REDIRECT_TIME)
    m['redirect-count'] = c.getinfo(pycurl.REDIRECT_COUNT)
    # m['size-upload'] = c.getinfo(pycurl.SIZE_UPLOAD)
    m['size-download'] = c.getinfo(pycurl.SIZE_DOWNLOAD)
    # m['speed-upload'] = c.getinfo(pycurl.SPEED_UPLOAD)
    m['header-size'] = c.getinfo(pycurl.HEADER_SIZE)
    m['request-size'] = c.getinfo(pycurl.REQUEST_SIZE)
    m['content-length-download'] = c.getinfo(pycurl.CONTENT_LENGTH_DOWNLOAD)
    m['content-length-upload'] = c.getinfo(pycurl.CONTENT_LENGTH_UPLOAD)
    m['content-type'] = c.getinfo(pycurl.CONTENT_TYPE)
    m['response-code'] = c.getinfo(pycurl.RESPONSE_CODE)
    m['speed-download'] = c.getinfo(pycurl.SPEED_DOWNLOAD)
    # m['ssl-verifyresult'] = c.getinfo(pycurl.SSL_VERIFYRESULT)
    m['filetime'] = c.getinfo(pycurl.INFO_FILETIME)
    m['starttransfer-time'] = c.getinfo(pycurl.STARTTRANSFER_TIME)
    m['http-connectcode'] = c.getinfo(pycurl.HTTP_CONNECTCODE)
    # m['httpauth-avail'] = c.getinfo(pycurl.HTTPAUTH_AVAIL)
    # m['proxyauth-avail'] = c.getinfo(pycurl.PROXYAUTH_AVAIL)
    # m['os-errno'] = c.getinfo(pycurl.OS_ERRNO)
    m['num-connects'] = c.getinfo(pycurl.NUM_CONNECTS)
    # m['ssl-engines'] = c.getinfo(pycurl.SSL_ENGINES)
    # m['cookielist'] = c.getinfo(pycurl.INFO_COOKIELIST)
    # m['lastsocket'] = c.getinfo(pycurl.LASTSOCKET)
    # m['ftp-entry-path'] = c.getinfo(pycurl.FTP_ENTRY_PATH)
    return m
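A minimal usage sketch for info() above, assuming a plain GET performed with pycurl; the URL, buffer handling, and option choices here are illustrative and not part of the original snippet:

import io
import pycurl

buf = io.BytesIO()
c = pycurl.Curl()
c.setopt(pycurl.URL, 'http://example.com/')   # placeholder URL
c.setopt(pycurl.WRITEDATA, buf)               # collect the response body
c.setopt(pycurl.FOLLOWLOCATION, True)         # makes redirect-count/-time meaningful
c.perform()                                   # info() is only valid after perform()
stats = info(c)
print(stats['http-code'], stats['total-time'], stats['content-type'])
c.close()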
def info(self):
    """Return a dictionary with all info on the last response."""
    m = {}
    m['effective-url'] = self.handle.getinfo(pycurl.EFFECTIVE_URL)
    m['http-code'] = self.handle.getinfo(pycurl.HTTP_CODE)
    m['total-time'] = self.handle.getinfo(pycurl.TOTAL_TIME)
    m['namelookup-time'] = self.handle.getinfo(pycurl.NAMELOOKUP_TIME)
    m['connect-time'] = self.handle.getinfo(pycurl.CONNECT_TIME)
    m['pretransfer-time'] = self.handle.getinfo(pycurl.PRETRANSFER_TIME)
    m['redirect-time'] = self.handle.getinfo(pycurl.REDIRECT_TIME)
    m['redirect-count'] = self.handle.getinfo(pycurl.REDIRECT_COUNT)
    m['size-upload'] = self.handle.getinfo(pycurl.SIZE_UPLOAD)
    m['size-download'] = self.handle.getinfo(pycurl.SIZE_DOWNLOAD)
    m['speed-upload'] = self.handle.getinfo(pycurl.SPEED_UPLOAD)
    m['header-size'] = self.handle.getinfo(pycurl.HEADER_SIZE)
    m['request-size'] = self.handle.getinfo(pycurl.REQUEST_SIZE)
    m['content-length-download'] = self.handle.getinfo(pycurl.CONTENT_LENGTH_DOWNLOAD)
    m['content-length-upload'] = self.handle.getinfo(pycurl.CONTENT_LENGTH_UPLOAD)
    m['content-type'] = self.handle.getinfo(pycurl.CONTENT_TYPE)
    m['response-code'] = self.handle.getinfo(pycurl.RESPONSE_CODE)
    m['speed-download'] = self.handle.getinfo(pycurl.SPEED_DOWNLOAD)
    m['ssl-verifyresult'] = self.handle.getinfo(pycurl.SSL_VERIFYRESULT)
    m['filetime'] = self.handle.getinfo(pycurl.INFO_FILETIME)
    m['starttransfer-time'] = self.handle.getinfo(pycurl.STARTTRANSFER_TIME)
    m['http-connectcode'] = self.handle.getinfo(pycurl.HTTP_CONNECTCODE)
    m['httpauth-avail'] = self.handle.getinfo(pycurl.HTTPAUTH_AVAIL)
    m['proxyauth-avail'] = self.handle.getinfo(pycurl.PROXYAUTH_AVAIL)
    m['os-errno'] = self.handle.getinfo(pycurl.OS_ERRNO)
    m['num-connects'] = self.handle.getinfo(pycurl.NUM_CONNECTS)
    m['ssl-engines'] = self.handle.getinfo(pycurl.SSL_ENGINES)
    m['cookielist'] = self.handle.getinfo(pycurl.INFO_COOKIELIST)
    m['lastsocket'] = self.handle.getinfo(pycurl.LASTSOCKET)
    m['ftp-entry-path'] = self.handle.getinfo(pycurl.FTP_ENTRY_PATH)
    return m
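This variant reads every counter from a handle stored on the instance as self.handle. A minimal sketch of the kind of wrapper class it could sit on; the class name, constructor, and get() helper are assumptions made for illustration, since the surrounding class is not shown in the snippet:

import io
import pycurl

class CurlClient(object):
    def __init__(self):
        self.handle = pycurl.Curl()   # attribute name matches the method above
        self.body = io.BytesIO()

    def get(self, url):
        self.handle.setopt(pycurl.URL, url)
        self.handle.setopt(pycurl.WRITEDATA, self.body)
        self.handle.perform()
        return self.body.getvalue()

    # info() from the snippet above would be added here as a method.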
def curl_result(c):
    """Collect transfer statistics from a completed pycurl.Curl handle."""
    effective_url = c.getinfo(pycurl.EFFECTIVE_URL)
    primary_ip = c.getinfo(pycurl.PRIMARY_IP)
    primary_port = c.getinfo(pycurl.PRIMARY_PORT)
    local_ip = c.getinfo(pycurl.LOCAL_IP)
    local_port = c.getinfo(pycurl.LOCAL_PORT)
    speed_download = c.getinfo(pycurl.SPEED_DOWNLOAD)
    size_download = c.getinfo(pycurl.SIZE_DOWNLOAD)
    redirect_time = c.getinfo(pycurl.REDIRECT_TIME)
    redirect_count = c.getinfo(pycurl.REDIRECT_COUNT)
    redirect_url = c.getinfo(pycurl.REDIRECT_URL)
    http_code = c.getinfo(pycurl.HTTP_CODE)
    response_code = c.getinfo(pycurl.RESPONSE_CODE)
    total_time = c.getinfo(pycurl.TOTAL_TIME)
    content_type = c.getinfo(pycurl.CONTENT_TYPE)
    namelookup_time = c.getinfo(pycurl.NAMELOOKUP_TIME)
    info_filetime = c.getinfo(pycurl.INFO_FILETIME)
    http_connectcode = c.getinfo(pycurl.HTTP_CONNECTCODE)
    starttransfer_time = c.getinfo(pycurl.STARTTRANSFER_TIME)
    pretransfer_time = c.getinfo(pycurl.PRETRANSFER_TIME)
    header_size = c.getinfo(pycurl.HEADER_SIZE)
    request_size = c.getinfo(pycurl.REQUEST_SIZE)
    ssl_verifyresult = c.getinfo(pycurl.SSL_VERIFYRESULT)
    num_connects = c.getinfo(pycurl.NUM_CONNECTS)
    return {
        'effective_url': effective_url,
        'primary_ip': primary_ip,
        'primary_port': primary_port,
        'local_ip': local_ip,
        'local_port': local_port,
        'speed_download': speed_download,
        'size_download': size_download,
        'redirect_time': redirect_time,
        'redirect_count': redirect_count,
        'redirect_url': redirect_url,
        'http_code': http_code,
        'response_code': response_code,
        'total_time': total_time,
        'content_type': content_type,
        'namelookup_time': namelookup_time,
        'info_filetime': info_filetime,
        'http_connectcode': http_connectcode,
        'starttransfer_time': starttransfer_time,
        'pretransfer_time': pretransfer_time,
        'header_size': header_size,
        'request_size': request_size,
        'ssl_verifyresult': ssl_verifyresult,
        'num_connects': num_connects,
        # 'proxy_ssl_verifyresult': proxy_ssl_verifyresult,
        # 'app_connecttime': app_connecttime,
    }
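libcurl's timing counters are cumulative from the start of the transfer (namelookup <= pretransfer <= starttransfer <= total), so per-phase durations can be derived by subtraction from the dict curl_result() returns. A small helper sketch; the key names match that dict, while the phase names are illustrative:

def phase_breakdown(result):
    """Split the cumulative libcurl timers into rough per-phase durations."""
    return {
        'dns': result['namelookup_time'],
        'connect_and_setup': result['pretransfer_time'] - result['namelookup_time'],
        'time_to_first_byte': result['starttransfer_time'] - result['pretransfer_time'],
        'body_transfer': result['total_time'] - result['starttransfer_time'],
    }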
def __check_for_stalls(self):
    """In some situations, libcurl can get itself
    tied in a knot, and fail to make progress.  Check that the
    active handles are making progress.  If none of the active
    handles have downloaded any content for the timeout period,
    reset the transport and generate exceptions for the failed
    requests."""

    timeout = global_settings.PKG_CLIENT_LOWSPEED_TIMEOUT
    if timeout == 0:
        return

    current_time = time.time()
    time_list = []
    size_list = []
    failures = []
    q_hdls = [
        hdl for hdl in self.__chandles
        if hdl not in self.__freehandles
    ]
    if not q_hdls:
        # Nothing active to check; min()/max() below would fail on empty lists.
        return

    # time.time() is based upon system clock.  Check that
    # our time hasn't been set backwards.  If time is set forward,
    # we'll have to expire the handles.  There's no way to detect
    # this until python properly implements gethrtime().  Solaris
    # implementations of time.clock() appear broken.
    for h in q_hdls:
        time_elapsed = current_time - h.starttime
        if time_elapsed < 0:
            h.starttime = current_time
            time_elapsed = 0
        size_xfrd = h.getinfo(pycurl.SIZE_DOWNLOAD) + \
            h.getinfo(pycurl.SIZE_UPLOAD)
        time_list.append(time_elapsed)
        size_list.append(size_xfrd)

    # If timeout is smaller than the smallest elapsed time,
    # and no data has been transferred, abort.
    if timeout < min(time_list) and max(size_list) == 0:
        for h in q_hdls:
            url = h.url
            uuid = h.uuid
            urlstem = h.repourl
            ex = tx.TransportStallError(url,
                repourl=urlstem, uuid=uuid)

            self.__mhandle.remove_handle(h)
            self.__teardown_handle(h)
            self.__freehandles.append(h)

            failures.append(ex)

    self.__failures.extend(failures)
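The check above polls active handles by hand; a complementary approach is to let libcurl abort stalled transfers itself via its low-speed options, which fail a handle when throughput stays below a threshold for a given time. A minimal sketch; the 1 byte/s threshold is an illustrative choice, and the timeout value is assumed to come from the same low-speed setting used above:

import pycurl

def apply_lowspeed_abort(handle, timeout_secs):
    # With these options set, libcurl aborts the transfer on its own (with
    # CURLE_OPERATION_TIMEDOUT) if fewer than LOW_SPEED_LIMIT bytes/sec are
    # moved for LOW_SPEED_TIME consecutive seconds.
    if timeout_secs == 0:
        return  # 0 disables the stall check, mirroring the code above
    handle.setopt(pycurl.LOW_SPEED_LIMIT, 1)            # bytes per second
    handle.setopt(pycurl.LOW_SPEED_TIME, timeout_secs)  # sustained for this long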