def get_sysinfo(self):
sysinfo = self.rpc.sysinfo(RPC_PWD)
return sysinfo
# client = queues.get_redis_client(QUEUE_REDIS_TYPE, QUEUE_REDIS_HOST, QUEUE_REDIS_PORT, QUEUE_REDIS_DB, QUEUE_REDIS_PWD, QUEUE_REDIS_NODES)
# queues = queues.RedisQueues(conn=client)
# pool = redis.ConnectionPool(host=QUEUE_REDIS_HOST, port=QUEUE_REDIS_PORT, db=QUEUE_REDIS_DB, password=QUEUE_REDIS_PWD)
# client = redis.StrictRedis(connection_pool=pool)
# qi = QueueInfo()
# qi.taskfail_rpush({'taskid': '27050696', 'error': "module 'tasks.spider' has no attribute 'save'"})
# ni = NodeInfo()
# ni.task_list()
Example source code for the Python ConnectionPool() class
def __init__(self):
    self.pool = redis.ConnectionPool(host=RTASK_REDIS_HOST, port=RTASK_REDIS_PORT, db=RTASK_REDIS_DB, password=RTASK_REDIS_PWD, encoding='utf-8', decode_responses=True)
self.client = redis.StrictRedis(connection_pool=self.pool)
self.macid = sysinfo.get_macid()
self.ips = sysinfo.get_ips()
self.hostname = sysinfo.get_hostname()
self.platform = sysinfo.get_platform()
def wait_parse_result(keys):
    pool = redis.ConnectionPool(host=REDIS_SERVER, port=6379, db=0, password=REDIS_PWD)
    r = redis.Redis(connection_pool=pool)
spider_json_content = None
while True:
        # TODO: a timeout is needed here
try:
_ = r.get(keys)
if _ is not None:
spider_json_content = _
r.delete(keys)
break
else:
time.sleep(2)
except Exception:
time.sleep(1)
return spider_json_content
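# A minimal producer-side sketch to pair with wait_parse_result() above; the
# helper name and the JSON payload are illustrative, and it assumes the same
# REDIS_SERVER / REDIS_PWD settings used by the consumer.
import json
import redis

def publish_parse_result(key, result):
    pool = redis.ConnectionPool(host=REDIS_SERVER, port=6379, db=0, password=REDIS_PWD)
    r = redis.Redis(connection_pool=pool)
    # The consumer deletes the key after reading it, so a plain SET is enough here.
    r.set(key, json.dumps(result))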
def unix_time_millis(dt):
"""
Convert datetime (dt) object to unix timestamp milliseconds.
:param dt: datetime.datetime
:return: float
"""
naive = dt.replace(tzinfo=None)
epoch = datetime.utcfromtimestamp(0)
    return (naive - epoch).total_seconds() * 1000.0
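# Usage sketch for unix_time_millis(); the sample datetime is arbitrary.
from datetime import datetime

print(unix_time_millis(datetime.utcnow()))  # milliseconds since the Unix epoch, e.g. 1700000000000.0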
# Use ConnectionPool in order to set decode_responses to True.
def get_pool(cls, redis_host, redis_port, redis_db):
"""build a redis connection
:returns: a valid connection
"""
try:
pool = redis.ConnectionPool(host=redis_host, port=redis_port, db=redis_db)
return redis.Redis(connection_pool=pool)
except Exception as e:
        logging.error('redis connection error: %s', e)
raise
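# The comment above mentions decode_responses; a hedged variant of get_pool()
# that returns str instead of bytes (this helper is an illustration, not part
# of the original class):
import redis

def get_pool_decoded(redis_host, redis_port, redis_db):
    pool = redis.ConnectionPool(host=redis_host, port=redis_port, db=redis_db,
                                decode_responses=True)
    return redis.Redis(connection_pool=pool)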
def __init__(self, name):
""""""
self.poped_name = self.POPED_FMT % (name)
self.pushed_name = self.PUSHED_FMT % (name)
pool = redis.ConnectionPool(host=self.HOST, port=self.PORT, db=self.DB)
    self.conn = redis.Redis(connection_pool=pool)
def getRedisConnection(db):
    '''Return the Redis connection for the given db type.'''
if db==APBase.REDSI_POOL:
args = settings.REDSI_KWARGS_LPUSH
        if settings.REDSI_LPUSH_POOL is None:
settings.REDSI_LPUSH_POOL = redis.ConnectionPool(host=args.get('host'), port=args.get('port'), db=args.get('db'))
pools = settings.REDSI_LPUSH_POOL
connection = redis.Redis(connection_pool=pools)
return connection
def __init__(
self, capacity, tolerant,
redis_conn={}):
size, hash_count = self._get_cap(capacity, tolerant)
self.redis_pool = redis.ConnectionPool(
host=redis_conn.get('host', '127.0.0.1'),
port=redis_conn.get('port', 6379),
db=redis_conn.get('db', 0),
)
self.bf = BloomFilter(hash_count, size)
self.bfkey = redis_conn.get('bfkey', 'bf')
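# A self-contained sketch of how a Redis-backed Bloom filter lookup can work
# with SETBIT/GETBIT; the hashing scheme below is illustrative and is not the
# BloomFilter class used above.
import hashlib
import redis

def _bit_offsets(value, hash_count, size):
    # Derive hash_count pseudo-independent bit positions inside a bitmap of `size` bits.
    digest = int(hashlib.md5(value.encode('utf-8')).hexdigest(), 16)
    return [(digest >> (i * 8)) % size for i in range(hash_count)]

def bf_add(client, bfkey, value, hash_count=6, size=1 << 20):
    for offset in _bit_offsets(value, hash_count, size):
        client.setbit(bfkey, offset, 1)

def bf_exists(client, bfkey, value, hash_count=6, size=1 << 20):
    return all(client.getbit(bfkey, offset) for offset in _bit_offsets(value, hash_count, size))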
def __init__(self, index_name, host='localhost', port=6379, conn=None):
"""
Create a new Client for the given index_name, and optional host and port
If conn is not None, we employ an already existing redis connection
"""
self.index_name = index_name
self.redis = conn if conn is not None else Redis(
connection_pool=ConnectionPool(host=host, port=port))
def __init__(self, key, host='localhost', port=6379, conn = None):
"""
Create a new AutoCompleter client for the given key, and optional host and port
If conn is not None, we employ an already existing redis connection
"""
self.key = key
self.redis = conn if conn is not None else Redis(
        connection_pool=ConnectionPool(host=host, port=port))
def make_redis_handler():
pool = redis.ConnectionPool(host=r_server['ip'], port=r_server['port'],
password=r_server['passwd'])
return redis.Redis(connection_pool=pool)
def connect_redis_server(self):
    pool = redis.ConnectionPool(host=r_server['ip'], port=r_server['port'], password=r_server['passwd'])
return redis.Redis(connection_pool=pool)
def redis_pool():
pool = redis.ConnectionPool(host='localhost', port=6379)
r = redis.Redis(connection_pool=pool)
    r.set('name', 'zhangsan')  # write the key
    print(r.get('name'))       # read it back
def redis_pipe():
pool = redis.ConnectionPool(host='localhost', port=6379)
r = redis.Redis(connection_pool=pool)
pipe = r.pipeline(transaction=True)
    pipe.set('name', 'zhangsan')
    pipe.set('name', 'lisi')
pipe.execute()
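# Follow-up sketch: pipeline.execute() returns one result per queued command,
# which is the main reason to batch reads as well as writes.
import redis

def redis_pipe_results():
    pool = redis.ConnectionPool(host='localhost', port=6379)
    r = redis.Redis(connection_pool=pool)
    pipe = r.pipeline(transaction=True)
    pipe.set('name', 'lisi')
    pipe.get('name')
    print(pipe.execute())  # e.g. [True, b'lisi']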
def get_redis():
redis_conf = {
'host': '127.0.0.1',
'port': 6379,
'db': 0
}
pool = redis.ConnectionPool(host=redis_conf['host'], port=redis_conf['port'], db=redis_conf['db'])
return redis.StrictRedis(connection_pool=pool)
def lookup_pool(cls, *args, **kwargs):
key = cls.key(*args, **kwargs)
if key not in cls.pool:
cls.pool[key] = redis.ConnectionPool(*args, **kwargs)
return cls.pool[key]
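# A hedged sketch of the class that lookup_pool() above could live in; the
# key() helper and the class-level `pool` dict are assumptions based on how
# lookup_pool() uses them.
import redis

class PooledRedis(object):
    pool = {}  # maps a connection-arguments key to a shared ConnectionPool

    @classmethod
    def key(cls, *args, **kwargs):
        # Any stable, hashable encoding of the connection arguments works here.
        return '{0}:{1}'.format(args, tuple(sorted(kwargs.items())))

    @classmethod
    def lookup_pool(cls, *args, **kwargs):
        key = cls.key(*args, **kwargs)
        if key not in cls.pool:
            cls.pool[key] = redis.ConnectionPool(*args, **kwargs)
        return cls.pool[key]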
def __init__(self, name, ttl=None):
"""Init Lock object.
:param name: name of the lock
:param ttl: number of seconds after acquiring when the lock must
be automatically released.
"""
self._redis_con = ConnectionPool().get_connection()
self._lock = None
self.name = self.lock_prefix + name
self.ttl = ttl
def is_acquired(cls, name):
"""Checks if the lock was already acquired and not yet released."""
redis_con = ConnectionPool().get_connection()
name = cls.lock_prefix + name
lock = redis_con.lock(name, 1)
res = False
try:
res = not lock.acquire(blocking=False)
finally:
try:
lock.release()
except redis.lock.LockError:
# exception is raised in case of already released lock
pass
return res
def clean_locks(cls, pattern=None):
"""Removes all locks. Optionally may be specified prefix for lock's
names.
"""
redis_con = ConnectionPool().get_connection()
if pattern:
pattern = cls.lock_prefix + pattern + '*'
else:
pattern = cls.lock_prefix + '*'
keys = list(redis_con.scan_iter(pattern))
if keys:
redis_con.delete(*keys)
def __init__(self, host, port):
self.conn = None
connKey = "%s:%s" % (host, port)
if connKey not in RedisHandler.connPool:
pool = redis.ConnectionPool(host=host, port=int(port))
r = redis.StrictRedis(connection_pool=pool)
RedisHandler.connPool[connKey] = r
self.conn = RedisHandler.connPool[connKey]
self.retry_times = RedisHandler.DEFAULT_RETRY_TIMES
def getRedis(**kwargs):
"""Get the instance of redis."""
pool = redis.ConnectionPool(**kwargs)
return redis.Redis(connection_pool=pool)
def __init__(self, db_host, db_port, db_num, db_pw):
self.pool = ConnectionPool(max_connections=2, db=db_num, host=db_host, port=db_port, password=db_pw,
decode_responses=True)
self.redis = StrictRedis(connection_pool=self.pool)
self.redis.ping()
self._object_map = WeakValueDictionary()
def __init__(self, conf):
"""
    :param conf: dict with the Redis host, port and db
"""
pool = redis.ConnectionPool(
host=conf.get('host', 'localhost'),
port=conf.get('port', 6379),
db=conf.get('db', 0))
self.__db = redis.StrictRedis(connection_pool=pool, socket_timeout=1)
    # Check that the Redis connection works
self.test_connect()
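# test_connect() is not shown in this snippet; a hedged sketch of what it might
# do, written as a method of the same class as the __init__ above:
def test_connect(self):
    """Ping Redis once so a bad host/port/db fails at construction time."""
    self.__db.ping()  # raises redis.exceptions.ConnectionError if unreachable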
def create_redis():
""" ?? redis ?? """
connection_pool = redis.ConnectionPool(
host=config.REDISDB['host'],
port=config.REDISDB['port'],
db=config.REDISDB['db'],
password=config.REDISDB['password']
)
return redis.Redis(connection_pool=connection_pool)
def _get_proxy():
pool = redis.ConnectionPool(host='172.26.253.91', port=6379)
r = redis.StrictRedis(connection_pool=pool, charset='utf-8')
proxys_str = r.get("0")
    print(proxys_str)
return eval(proxys_str)
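# eval() on a value read back from Redis will execute whatever happens to be
# stored there; a hedged alternative, assuming the writer stores JSON instead
# of a Python repr():
import json

def _get_proxy_json(r, key="0"):
    raw = r.get(key)
    return json.loads(raw) if raw is not None else None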
def __init__(self, config):
BaseCache.__init__(self, config)
self.key_prefix = config.get('key_prefix', '')
try:
import redis
except ImportError:
raise RuntimeError('no redis module found')
kwargs = dict((k, v) for k, v in config.items() if k not in _redis_kwargs_exclusions)
if 'socket_timeout' not in kwargs:
kwargs['socket_timeout'] = _DEFAULT_SOCKET_TIMEOUT
if 'socket_connect_timeout' not in kwargs:
kwargs['socket_connect_timeout'] = _DEFAULT_SOCKET_TIMEOUT
if 'socket_keepalive' not in kwargs:
kwargs['socket_keepalive'] = 1
if 'socket_keepalive_options' not in kwargs:
kwargs['socket_keepalive_options'] = _TCP_KEEP_ALIVE_OPTIONS
if kwargs.pop('blocking_pool', False):
if 'blocking_pool_timeout' in kwargs:
kwargs['timeout'] = kwargs.pop('blocking_pool_timeout')
else:
kwargs['timeout'] = _DEFAULT_REDIS_BLOCKING_POOL_TIMEOUT
connection_pool = redis.BlockingConnectionPool(**kwargs)
else:
connection_pool = redis.ConnectionPool(**kwargs)
self._client = redis.Redis(connection_pool=connection_pool)
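# A hedged example of the `config` dict the cache above accepts; every key not
# listed in _redis_kwargs_exclusions is forwarded to the redis ConnectionPool,
# and the exact exclusion set is an assumption here.
example_config = {
    'key_prefix': 'cache:',        # consumed by this class, not by redis
    'host': '127.0.0.1',
    'port': 6379,
    'db': 0,
    'blocking_pool': True,         # switch to redis.BlockingConnectionPool
    'blocking_pool_timeout': 5,    # becomes the pool's `timeout` argument
}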
def __init__(self):
self._pool = redis.ConnectionPool(**conf.REDIS_CACHE)
self.redis = redis.Redis(connection_pool=self._pool)
self._persist_pool = redis.ConnectionPool(**conf.REDIS_PERSIST)
self.persist_redis = redis.Redis(connection_pool=self._persist_pool)