def reconcile_zones(bind=True):
    """
    Periodic task that reconciles everything zone-related (zone deletion, policy record updates).

    A short-lived Redis lock ensures only one worker reconciles at a time;
    if another run holds the lock we bail out immediately.
    """
    redis_client = redis.from_url(settings.LOCK_SERVER_URL)
    lock = redis_client.lock('reconcile_zones', timeout=60)
    if not lock.acquire(blocking=False):
        # Fixed typos: "aquire"/"Probaly" -> "acquire"/"Probably".
        logger.info('Cannot acquire task lock. Probably another task is running. Bailing out.')
        return
    try:
        for zone in models.Zone.need_reconciliation():
            try:
                zone.reconcile()
                lock.extend(5)  # extend the lease each time we rebuild a tree
            except Exception:
                # One failing zone must not abort reconciliation of the rest.
                logger.exception(
                    "reconcile failed for Zone %s.%s", zone, zone.root
                )
    finally:
        lock.release()
# Example source code for Python redis.from_url() usage
def update_ns_propagated(bind=True):
    """
    Periodic task that refreshes the ns_propagated flag on zones.

    Guarded by a Redis lock so only one worker updates at a time.
    """
    redis_client = redis.from_url(settings.LOCK_SERVER_URL)
    # make this lock timeout big enough to cover updating about 1000 zones
    # ns_propagated flag and small enough to update the flag in an acceptable
    # time frame. 5 minutes sound good at the moment.
    lock = redis_client.lock('update_ns_propagated', timeout=300)
    if not lock.acquire(blocking=False):
        # Fixed typos: "aquire"/"Probaly" -> "acquire"/"Probably".
        logger.info('Cannot acquire task lock. Probably another task is running. Bailing out.')
        return
    try:
        models.Zone.update_ns_propagated(delay=getattr(settings, 'ZINC_NS_UPDATE_DELAY', 0.3))
    except Exception:
        logger.exception("Could not update ns_propagated flag")
    finally:
        # Release in ``finally`` so the lock is freed even on unexpected
        # exits (the original released outside the try, leaking the lock
        # for non-Exception interruptions).
        lock.release()
def addfromcsv(self):
    """Bulk-import spaces from the bundled CSV into Redis.

    Each row becomes a Redis hash keyed by its space name concatenated with
    the import timestamp (spaces stripped), flagged archived=False and
    verified=True.
    """
    redis_url = os.environ.get("REDIS_URL") or "localhost"
    r_server = redis.from_url(redis_url)
    with open('mapofinnovation/public/spaces_ready_for_merge.csv', 'rb') as csv_file:
        # Let the Sniffer confirm the comma dialect before parsing.
        dialect = csv.Sniffer().sniff(csv_file.read(), delimiters=',')
        csv_file.seek(0)
        for row in csv.DictReader(csv_file, dialect=dialect):
            row['archived'] = False
            row['verified'] = True
            hash_key = re.sub(' ', '', row['name'] + str(datetime.now()))
            r_server.hmset(hash_key, row)
    return {'success': 'true'}
def wikipage(self,id=None):
    """Render the wiki page for a single space, looked up by its Redis key.

    :param id: Redis hash key of the space; ``None`` yields an error string.
    :return: rendered HTML, or a plain error-message string.
    """
    #Return a wiki for the given space
    if os.environ.get("REDIS_URL") :
        redis_url = os.environ.get("REDIS_URL")
    else:
        redis_url = "localhost"
    r = redis.from_url(redis_url)
    if id is None :
        return 'Provide a valid space id'
    elif r.exists(id):
        data = r.hgetall(id)
        # NOTE(review): Python-2 style byte handling — street_address is
        # decoded as Latin-1 while the website URL is percent-unquoted then
        # decoded as UTF-8; presumably the stored encodings differ. Verify.
        addresstext = str(data['street_address']).decode("ISO-8859-1")
        websitetext = urllib.unquote(data['primary_website']).decode('utf8')
        return render('/wikipage.html',extra_vars={'last_updated':str(data['last_updated']),'name':str(data['name']),'status':str(data['status']),'website_url':websitetext,'primarytype':str(data['primary_type']),'secondarytype':'','space_description':str(data['description']),'address':addresstext})
    else :
        return 'There is no space with this id. Please recheck and submit'
def getAllSpaces(self):
    """Return every space stored in Redis as a list of dicts.

    Internal-only fields (image_url, g_place_id) are skipped; all other
    values are decoded defensively with ``errors='replace'``.

    :return: list of dicts, one per Redis key.
    """
    spaceslist = []
    redis_url = os.environ.get("REDIS_URL") or "localhost"
    r = redis.from_url(redis_url)
    for key in r.scan_iter():
        row = r.hgetall(key)
        # Removed leftover debug ``print i``; inverted the empty
        # if/pass-else into a direct ``not in`` test.
        space = {}
        for field in row:
            if field not in ("image_url", "g_place_id"):
                space[field] = unicode(row[field], errors='replace')
        spaceslist.append(space)
    return spaceslist
def addSpace(self):
    """Add a new space from the request parameters.

    Skips insertion when a space with the same primary_website already
    exists. New spaces are stored unarchived and unverified, keyed by
    name + timestamp with spaces stripped.

    :return: ``{'success': 'true'}`` (key fixed from the misspelled
        ``'sucess'`` for consistency with ``addfromcsv``).
    """
    redis_url = os.environ.get("REDIS_URL") or "localhost"
    r = redis.from_url(redis_url)
    surl = request.params.get("primary_website")
    exists = False
    # Guard clause instead of the original if/pass-else.
    if surl is not None:
        exists = self._search_space(surl)
    if exists is False:
        dparams = {}
        for k, v in request.params.items():
            dparams[k] = v
        dparams['archived'] = False
        dparams['verified'] = False
        skey = request.params.get("name") + str(datetime.now())
        r.hmset(re.sub(' ', '', skey), dparams)
    return {'success': 'true'}
def redis(app, config, args, kwargs):
    """Construct a RedisCache backend from Flask-style cache configuration.

    :param app: the application (part of the factory signature)
    :param config: mapping of CACHE_* options
    :param args: positional args forwarded to RedisCache
    :param kwargs: keyword args forwarded to RedisCache (mutated in place)
    :return: a RedisCache instance
    """
    kwargs.update(dict(
        host=config.get('CACHE_REDIS_HOST', 'localhost'),
        port=config.get('CACHE_REDIS_PORT', 6379),
    ))
    # Optional settings are only forwarded when truthy.
    password = config.get('CACHE_REDIS_PASSWORD')
    if password:
        kwargs['password'] = password
    key_prefix = config.get('CACHE_KEY_PREFIX')
    if key_prefix:
        kwargs['key_prefix'] = key_prefix
    db_number = config.get('CACHE_REDIS_DB')
    if db_number:
        # NOTE(review): truthiness drops an explicit db 0 — harmless only
        # because 0 is the redis default; confirm intended.
        kwargs['db'] = db_number
    redis_url = config.get('CACHE_REDIS_URL')
    if redis_url:
        # URL takes precedence: a ready client object is passed as ``host``
        # — presumably RedisCache accepts a client there; verify.
        kwargs['host'] = redis_from_url(
            redis_url,
            db=kwargs.pop('db', None),
        )
    return RedisCache(*args, **kwargs)
def gen_markov(): # pragma: no cover
    """Compile all the tweets and create a Markov chain.

    Reads every Tweet row from the database, builds a newline-separated
    corpus, trains a state-size-3 Markov model, and stores the pickled
    model in Redis under 'markov_tweets'.
    """
    host_url = os.environ.get('REDIS_URL')
    access_dict = {'sqlalchemy.url': os.environ.get('DATABASE_URL')}
    engine = get_engine(access_dict)
    SessionFactory = sessionmaker(bind=engine)
    session = SessionFactory()
    tweets = session.query(Tweet).all()
    # join() instead of += in a loop: linear rather than quadratic time.
    big_corpus = ''.join(tweet.tweet + '\n' for tweet in tweets)
    markov_chain = markovify.NewlineText(big_corpus, state_size=3)
    to_redis = pickle.dumps(markov_chain)
    redis.from_url(host_url).set('markov_tweets', to_redis)
def gen_markov():
    """Compile all the tweets and create a Markov chain.

    Loads all tweets, concatenates them into a newline-delimited corpus,
    trains a markovify NewlineText model (state size 3), and pickles the
    result into Redis at key 'markov_tweets'.
    """
    host_url = os.environ.get('REDIS_URL')
    access_dict = {'sqlalchemy.url': os.environ.get('DATABASE_URL')}
    engine = get_engine(access_dict)
    SessionFactory = sessionmaker(bind=engine)
    session = SessionFactory()
    tweets = session.query(Tweet).all()
    # Build the corpus with a single join; += in a loop is quadratic.
    big_corpus = ''.join(tweet.tweet + '\n' for tweet in tweets)
    markov_chain = markovify.NewlineText(big_corpus, state_size=3)
    to_redis = pickle.dumps(markov_chain)
    redis.from_url(host_url).set('markov_tweets', to_redis)
def __init__(self, data_dir=bqueryd.DEFAULT_DATA_DIR, redis_url='redis://127.0.0.1:6379/0', loglevel=logging.DEBUG):
    """Worker node: bind a ZMQ ROUTER socket and register with controllers.

    :param data_dir: directory holding the data files this worker serves
    :param redis_url: Redis instance used for controller discovery
    :param loglevel: level for this worker's child logger
    :raises Exception: if *data_dir* does not exist or is not a directory
    """
    if not os.path.exists(data_dir) or not os.path.isdir(data_dir):
        raise Exception("Datadir %s is not a valid directory" % data_dir)
    # Random 16-hex-char id doubles as the ZMQ socket identity below.
    self.worker_id = binascii.hexlify(os.urandom(8))
    self.node_name = socket.gethostname()
    self.data_dir = data_dir
    self.data_files = set()
    context = zmq.Context()
    self.socket = context.socket(zmq.ROUTER)
    # Don't block forever on unsent messages at shutdown (500 ms linger).
    self.socket.setsockopt(zmq.LINGER, 500)
    self.socket.identity = self.worker_id
    self.poller = zmq.Poller()
    self.poller.register(self.socket, zmq.POLLIN | zmq.POLLOUT)
    self.redis_server = redis.from_url(redis_url)
    self.controllers = {}  # Keep a dict of timestamps when you last spoke to controllers
    self.check_controllers()
    self.last_wrm = 0
    self.start_time = time.time()
    self.logger = bqueryd.logger.getChild('worker ' + self.worker_id)
    self.logger.setLevel(loglevel)
    self.msg_count = 0
    # NOTE(review): term_signal() is *called* here — presumably it returns
    # the actual handler callable; confirm it isn't meant to be passed uncalled.
    signal.signal(signal.SIGTERM, self.term_signal())
def __init__(self, address=None, timeout=120, redis_url='redis://127.0.0.1:6379/0', loglevel=logging.INFO, retries=3):
    """RPC client: connect to a given controller or a random registered one.

    :param address: explicit controller address; when ``None``, controllers
        are discovered from the Redis set ``bqueryd.REDIS_SET_KEY``
    :param timeout: per-call timeout in seconds
    :param redis_url: Redis instance used for controller discovery
    :param loglevel: level for the 'rpc' child logger
    :param retries: how many attempts calls are given
    :raises Exception: if no address is given and Redis lists no controllers
    """
    self.logger = bqueryd.logger.getChild('rpc')
    self.logger.setLevel(loglevel)
    self.context = zmq.Context()
    self.redis_url = redis_url
    redis_server = redis.from_url(redis_url)
    self.retries = retries
    self.timeout = timeout
    # Random hex identity distinguishes this client on the ZMQ socket.
    self.identity = binascii.hexlify(os.urandom(8))
    if not address:
        # Bind to a random controller
        controllers = list(redis_server.smembers(bqueryd.REDIS_SET_KEY))
        if len(controllers) < 1:
            raise Exception('No Controllers found in Redis set: ' + bqueryd.REDIS_SET_KEY)
        random.shuffle(controllers)
    else:
        controllers = [address]
    self.controllers = controllers
    self.connect_socket()
def _get_cache_client():
    """
    Return a bare python redis client for the app's configured cache.

    We use the plain redis client to list all keys, since that operation
    is not supported by the flask cache wrapper. Exits the process with
    status 1 when the cache backend is not redis or the URL is invalid.

    :return: redis connection
    """
    config = app.cache.config
    cache_type = config['CACHE_TYPE']
    if cache_type != 'redis':
        print('NOT SUPPORTED CACHE BACKEND, ONLY SUPPORTED IS (redis)')
        exit(1)
    try:
        # Reuse the already-fetched ``config`` instead of re-reading
        # app.cache.config a second time.
        return redis_from_url(config['CACHE_REDIS_URL'])
    except Exception:
        # Narrowed from a bare ``except:`` which would also swallow
        # SystemExit/KeyboardInterrupt.
        print('BAD REDIS URL PROVIDED BY (CACHE_BACKEND_URI)')
        exit(1)
def redis(app, config, args, kwargs):
    """Instantiate a RedisCache backend from the app's cache configuration.

    :param app: the application (part of the factory signature)
    :param config: mapping of CACHE_* options
    :param args: positional args forwarded to RedisCache
    :param kwargs: keyword args forwarded to RedisCache (mutated in place)
    :return: a RedisCache instance
    """
    kwargs['host'] = config.get('CACHE_REDIS_HOST', 'localhost')
    kwargs['port'] = config.get('CACHE_REDIS_PORT', 6379)
    # Optional settings are forwarded only when truthy.
    password = config.get('CACHE_REDIS_PASSWORD')
    if password:
        kwargs['password'] = password
    key_prefix = config.get('CACHE_KEY_PREFIX')
    if key_prefix:
        kwargs['key_prefix'] = key_prefix
    db_number = config.get('CACHE_REDIS_DB')
    if db_number:
        kwargs['db'] = db_number
    redis_url = config.get('CACHE_REDIS_URL')
    if redis_url:
        # A full URL takes precedence: hand a ready client in as ``host``.
        kwargs['host'] = redis_from_url(redis_url, db=kwargs.pop('db', None))
    return RedisCache(*args, **kwargs)
def __init__(self, config):
    """Flask/WebSocket front-end; Redis is optional and enables push features.

    :param config: dict with a 'server' section (host, port, terse, and
        optionally redis_url/redis_chan).
    """
    self.redis = None
    self.backend = None
    if config['server'].get('redis_url'):
        self.redis = redis.from_url(config['server']['redis_url'])
        self.redis_chan = config['server']['redis_chan']
        self.backend = ServerBackend(self.redis, self.redis_chan)
    else:
        # Without Redis there is no pub/sub channel to fan updates out on.
        print('No redis configured, disabling Websockets and remote web console')
    self.flask_host = config['server']['host']
    self.flask_port = config['server']['port']
    self.flask_app = Flask(__name__)
    self.flask_app.add_url_rule('/', 'index', self._index)
    sockets = Sockets(self.flask_app)
    # sockets.add_url_rule('/submit', 'submit', self._inbox)
    sockets.add_url_rule('/status', 'status', self._status)
    # self.redis_chan is only read when self.redis was set above.
    self.console = PushingConsole(self.redis, self.redis_chan, config['server']['terse']) if self.redis else None
def main(msg, config, silent=False):
"""
Job enqueue
:param msg:str
:param config:
:return:
"""
queue_dsn = config["queue"]["dsn"]
redis_conn = redis.from_url(queue_dsn)
q = Queue('low', connection=redis_conn)
ret = q.enqueue(push_messenger, msg, result_ttl=60)
if silent is True:
return ret
else:
print ret
def make_crawler(spider_cls=ATestBaseSpider, **extra_settings):
    """Build a crawler for *spider_cls*, wiping its Redis scheduler state first."""
    # Only special test spiders may have their queues deleted.
    assert spider_cls.name.startswith('test_'), 'pass a special test spider'
    server = redis.from_url('redis://localhost')
    spider_name = spider_cls.name
    queue_keys = server.keys(SCHEDULER_QUEUE_KEY % {'spider': spider_name} + '*')
    server.delete(SCHEDULER_DUPEFILTER_KEY % {'spider': spider_name}, *queue_keys)
    crawler_settings = Settings()
    crawler_settings.setmodule(dd_crawler.settings)
    crawler_settings['ITEM_PIPELINES']['tests.utils.CollectorPipeline'] = 100
    crawler_settings.update(extra_settings)
    return CrawlerRunner(crawler_settings).create_crawler(spider_cls)
def __init__(self):
    """Resolve the Redis connection from settings.

    Priority: an explicit CONNECTION_CLASS factory, then a URL string
    (REDIS_CONNECTION as str), then keyword args (REDIS_CONNECTION as dict).

    :raises ImproperlyConfigured: if redis-py is not installed.
    """
    super(RedisBackend, self).__init__()
    self._prefix = settings.REDIS_PREFIX
    connection_cls = settings.CONNECTION_CLASS
    if connection_cls is not None:
        # A dotted-path factory takes precedence over REDIS_CONNECTION.
        self._rd = utils.import_module_attr(connection_cls)()
    else:
        try:
            import redis
        except ImportError:
            raise ImproperlyConfigured(
                "The Redis backend requires redis-py to be installed.")
        # ``basestring`` => this module targets Python 2.
        if isinstance(settings.REDIS_CONNECTION, basestring):
            self._rd = redis.from_url(settings.REDIS_CONNECTION)
        else:
            self._rd = redis.Redis(**settings.REDIS_CONNECTION)
def gettoken(uid):
    """Return a valid Amazon access token for *uid*, refreshing if needed.

    Looks up the cached access token in Redis; if absent but a refresh
    token exists, exchanges it for a new access token (cached for 1 hour).

    :param uid: user identifier used to namespace the Redis keys
    :return: the access token string, or False when no token can be obtained
    """
    red = redis.from_url(redis_url)
    token = red.get(uid + "-access_token")
    refresh = red.get(uid + "-refresh_token")
    if token:
        return token
    elif refresh:
        # Access token expired but we still hold a refresh token.
        try:
            payload = {
                "client_id": Client_ID,
                "client_secret": Client_Secret,
                "refresh_token": refresh,
                "grant_type": "refresh_token",
            }
            url = "https://api.amazon.com/auth/o2/token"
            r = requests.post(url, data=payload)
            resp = json.loads(r.text)
            red.set(uid + "-access_token", resp['access_token'])
            red.expire(uid + "-access_token", 3600)  # tokens live one hour
            return resp['access_token']
        # Narrowed from a bare ``except:`` which would also swallow
        # SystemExit/KeyboardInterrupt; a bad/revoked refresh token or a
        # network error reports failure instead of raising.
        except Exception:
            return False
    else:
        return False
#function version of getting Alexa's response in text
def get(self):
    """OAuth redirect handler: exchange the auth code for Amazon tokens.

    Reads the ``code`` query argument, posts it to the Amazon token
    endpoint, and stores the resulting tokens in Redis keyed by the user's
    messenger id (from the ``user`` cookie). Without a cookie, the refresh
    token is forwarded via the query string instead.
    """
    code = self.get_argument("code")
    mid = self.get_cookie("user")
    # Rebuild the https callback URL that was used for the original request.
    path = "https" + "://" + self.request.host
    callback = path + "/code"
    payload = {"client_id" : Client_ID, "client_secret" : Client_Secret, "code" : code, "grant_type" : "authorization_code", "redirect_uri" : callback }
    url = "https://api.amazon.com/auth/o2/token"
    r = requests.post(url, data = payload)
    red = redis.from_url(redis_url)
    resp = json.loads(r.text)
    # ``is not None`` instead of ``!= None`` (identity check for None).
    if mid is not None:
        print("fetched MID: ",mid)
        red.set(mid+"-access_token", resp['access_token'])
        red.expire(mid+"-access_token", 3600)  # access token lives one hour
        red.set(mid+"-refresh_token", resp['refresh_token'])
        self.render("static/return.html")
        bot.send_text_message(mid, "Great, you're logged in. Start talking to Alexa!")
    else:
        self.redirect("/?refreshtoken="+resp['refresh_token'])
def from_settings(settings):
    """Create a redis client from settings; REDIS_URL wins over host/port."""
    url = settings.get('REDIS_URL', REDIS_URL)
    host = settings.get('REDIS_HOST', REDIS_HOST)
    port = settings.get('REDIS_PORT', REDIS_PORT)
    if not url:
        return redis.Redis(host=host, port=port)
    return redis.from_url(url)
def from_settings_filter(settings):
    """Create the dupe-filter redis client; FILTER_URL wins over host/port/db."""
    url = settings.get('FILTER_URL', FILTER_URL)
    host = settings.get('FILTER_HOST', FILTER_HOST)
    port = settings.get('FILTER_PORT', FILTER_PORT)
    db = settings.get('FILTER_DB', FILTER_DB)
    if not url:
        return redis.Redis(host=host, port=port, db=db)
    return redis.from_url(url)
def from_settings(settings):
    """
    Build the RabbitMQ channel and the Redis connection for the scheduler.

    :param: settings object
    :return: tuple of (pika channel, redis client) — note: not just a
        Channel object as a caller might assume.
    """
    connection_type = settings.get('RABBITMQ_CONNECTION_TYPE', RABBITMQ_CONNECTION_TYPE)
    connection_parameters = settings.get('RABBITMQ_CONNECTION_PARAMETERS', RABBITMQ_CONNECTION_PARAMETERS)
    # Dispatch table maps the configured name to the pika connection class,
    # then immediately instantiates it with the connection parameters.
    connection = {
        'blocking': pika.BlockingConnection,
        'libev': pika.LibevConnection,
        'select': pika.SelectConnection,
        'tornado': pika.TornadoConnection,
        'twisted': pika.TwistedConnection
    }[connection_type](pika.ConnectionParameters(**connection_parameters))
    channel = connection.channel()
    # Fair dispatch: deliver at most one unacknowledged message per consumer.
    channel.basic_qos(prefetch_count=1)
    url = settings.get('REDIS_URL', REDIS_URL)
    host = settings.get('REDIS_HOST', REDIS_HOST)
    port = settings.get('REDIS_PORT', REDIS_PORT)
    # REDIS_URL takes precedence over host/port specification.
    if url:
        redis_server = redis.from_url(url)
    else:
        redis_server = redis.Redis(host=host, port=port)
    return channel, redis_server
def redis_connect(self):
    """Create a redis client from REDIS_URL (default local ``redis://``)."""
    url = os.environ.get('REDIS_URL', 'redis://')
    return redis.from_url(url)
def setup_rq_connection():
    """Attach a Redis connection for RQ to the current Flask app.

    Prefers REDIS_URL; falls back to discrete host/port/password/db settings.
    """
    url = current_app.config.get('REDIS_URL')
    if url:
        current_app.redis_conn = from_url(url)
        return
    current_app.redis_conn = Redis(
        host=current_app.config.get('REDIS_HOST'),
        port=current_app.config.get('REDIS_PORT'),
        password=current_app.config.get('REDIS_PASSWORD'),
        db=current_app.config.get('REDIS_DB')
    )
def __init__(self, key):
    """Bind this instance to *key* and open a connection to REDIS_URL."""
    self.connection = redis.from_url(REDIS_URL)
    self.key = key
def main():
    """Start the application with an RQ queue backed by the configured Redis."""
    app = Application(urls)
    redis_conn = redis.from_url(app.config['redis']['url'])
    app.q = Queue(connection=redis_conn)
    app.start()
def teardown_method(self, test_method):
    """Wipe the test Redis instance after each test method."""
    client = redis.from_url(REDIS_URL)
    client.flushall()
def test_add_command_one_lang(self):
    """Adding 10 zh_TW commands stores exactly 10 zh_TW keys."""
    get_dao().add_commands(gen_test_commands(10, 'zh_TW'))
    client = redis.from_url(REDIS_URL)
    assert len(client.keys('COMMAND::zh_TW::*')) == 10
def test_add_command_two_lang(self):
    """Commands for two languages are stored under separate key prefixes."""
    get_dao().add_commands(gen_test_commands(10, 'zh_TW'))
    get_dao().add_commands(gen_test_commands(20, 'en'))
    client = redis.from_url(REDIS_URL)
    assert len(client.keys('COMMAND::zh_TW::*')) == 10
    assert len(client.keys('COMMAND::en::*')) == 20
    assert len(client.keys('COMMAND::*')) == 30
def test_delete_command(self):
    """clear_all_command removes every stored command key."""
    # Seed the store via the two-language test, then clear everything.
    self.test_add_command_two_lang()
    get_dao().clear_all_command()
    client = redis.from_url(REDIS_URL)
    assert len(client.keys('COMMAND::*')) == 0