def logout(self):
    """Explicitly log out of Abode and reset all cached session state.

    Local state (token, panel, user, devices, automations) is cleared
    before the remote call so the client is logged out locally even if
    the HTTP request fails.

    Returns:
        bool: True on success (or when there was no token to revoke);
        False when a network or response-parsing error occurred.

    Raises:
        AbodeAuthenticationException: when the server answers the logout
            request with a non-200 status code.
    """
    if self._token:
        header_data = {
            'ABODE-API-KEY': self._token
        }

        # Reset cached state up front; a fresh session drops any cookies.
        self._session = requests.session()
        self._token = None
        self._panel = None
        self._user = None
        self._devices = None
        self._automations = None

        try:
            response = self._session.post(
                CONST.LOGOUT_URL, headers=header_data)
            response_object = json.loads(response.text)
        except OSError as exc:
            # requests' RequestException derives from OSError/IOError.
            _LOGGER.warning("Caught exception during logout: %s", str(exc))
            return False
        except ValueError as exc:
            # Fix: json.loads raises ValueError on a non-JSON body, which
            # previously escaped this handler and crashed the caller.
            _LOGGER.warning("Invalid logout response body: %s", str(exc))
            return False

        if response.status_code != 200:
            raise AbodeAuthenticationException(
                (response.status_code, response_object['message']))

        _LOGGER.debug("Logout Response: %s", response.text)

    _LOGGER.info("Logout successful")
    return True
# Example source code using requests.session() instances (collected snippets).
def pinned_session(pool_maxsize=8):
    """Build a requests session whose HTTPS traffic uses the pinning adapter.

    Args:
        pool_maxsize: maximum number of connections kept per pool
            (default 8); pool_connections is fixed at 4.

    Returns:
        A requests session with _SSLAdapter mounted for 'https://' URLs.
    """
    session = requests.session()
    adapter = _SSLAdapter(pool_connections=4, pool_maxsize=pool_maxsize)
    session.mount('https://', adapter)
    return session
def zw_init(url='https://127.0.0.1/', user='test_user', pswd='test_password'):
    """Open a Z-Ware session, log in, and point the base URL at the CGI API.

    Args:
        url: base URL of the Z-Ware server.
        user: login user name.
        pswd: login password.

    Returns:
        The zw_api('zw_version') response after a successful login.
    """
    zwareGlobals.zwareUrl = url
    zwareGlobals.zwareSession = requests.session()
    # Apache requires this content type for the form-encoded login below.
    zwareGlobals.zwareSession.headers.update(
        {'Content-Type': 'application/x-www-form-urlencoded'})
    credentials = 'usrname=' + user + '&passwd=' + pswd
    zw_api('register/login.php', credentials)
    zwareGlobals.zwareUrl += 'cgi/zcgi/networks//'
    return zw_api('zw_version')
def main_craw_ptt(i, ptt_class_name, sql_name, bo):
    """Crawl one PTT board index page and hand each article to craw_ptt_data_fun.

    Args:
        i: index page number of the board (e.g. 4806).
        ptt_class_name: board name, e.g. 'Soft_Job'.
        sql_name: SQL target name passed through to the helper functions.
        bo: 'new' to crawl only data newer than the stored max date/time,
            'his' to crawl full history (no date cut-off).
    """
    index_name = 'http://www.ptt.cc'
    index_class = '/bbs/' + ptt_class_name + '/index'
    index_url = index_name + index_class + str(i) + '.html'
    res = requests.get(index_url, verify=True)
    soup = BeautifulSoup(res.text, "lxml")
    entries = soup.find_all("", {'class': 'r-ent'})

    # Fix: reuse one HTTP session for all article requests instead of
    # constructing a throwaway requests.session() per article.
    session = requests.session()

    # Fix: loop variable renamed from `i`, which shadowed the page-index
    # parameter of the same name.
    for idx in range(len(entries)):
        link = entries[idx].find('a')
        if str(link) == 'None':
            # Deleted article: the <a> tag is missing from the entry.
            print('error')
        else:
            article_url = index_name + link['href']
            title = link.get_text()
            response = session.get(article_url)
            if response.status_code == 404:
                print(404)
            elif re.search('[??]', title):
                # NOTE(review): this pattern is mojibake from the original
                # source encoding; kept byte-identical to preserve behavior.
                print('[??]')
            elif response.status_code == 200:
                # Fix: max_date_time was unbound (NameError) when bo was
                # neither 'new' nor 'his'; default to 0 (full history).
                max_date_time = 0
                if bo == 'new':
                    # Resume from the stored maximum date/time in SQL.
                    date_time = catch_ptt_history_date_time(
                        ptt_class_name, sql_name)
                    # NOTE(review): fix_data() applies max(date_time) here;
                    # confirm which form is intended.
                    max_date_time = date_time
                tem = craw_ptt_data_fun(
                    article_url, entries, idx, index_url, sql_name,
                    max_date_time, bo)
            else:
                print('other')
#---------------------------------------------------------------------------------
# Repair pass: re-crawl data for one index page, resuming from entry offset j
# after a partial failure (original comment was garbled mojibake).
def fix_data(i, ptt_class_name, sql_name, bo, j):
    """Re-crawl one PTT board index page starting from entry offset j.

    Same flow as main_craw_ptt, but resumes mid-page — used to patch data
    after a partial failure (e.g. an error at entry 5 of index page 100).

    Args:
        i: index page number of the board.
        ptt_class_name: board name, e.g. 'Soft_Job'.
        sql_name: SQL target name passed through to the helper functions.
        bo: 'new' for incremental crawl, 'his' for full history.
        j: entry offset within the index page to resume from.
    """
    index_name = 'http://www.ptt.cc'
    index_class = '/bbs/' + ptt_class_name + '/index'
    index_url = index_name + index_class + str(i) + '.html'
    res = requests.get(index_url, verify=True)
    soup = BeautifulSoup(res.text, "lxml")
    entries = soup.find_all("", {'class': 'r-ent'})

    # Fix: one shared session instead of requests.session() per article.
    session = requests.session()

    # Fix: loop variable renamed from `i`, which shadowed the page-index
    # parameter of the same name.
    for idx in range(j, len(entries)):
        link = entries[idx].find('a')
        if str(link) == 'None':
            # Deleted article: the <a> tag is missing from the entry.
            print('error')
        else:
            article_url = index_name + link['href']
            title = link.get_text()
            response = session.get(article_url)
            if response.status_code == 404:
                print(404)
            elif re.search('[??]', title):
                # NOTE(review): mojibake pattern kept byte-identical to
                # preserve the original behavior.
                print('[??]')
            elif response.status_code == 200:
                # Fix: max_date_time was unbound (NameError) when bo was
                # neither 'new' nor 'his'; default to 0 (full history).
                max_date_time = 0
                if bo == 'new':
                    date_time = catch_ptt_history_date_time(
                        ptt_class_name, sql_name)
                    max_date_time = max(date_time)
                tem = craw_ptt_data_fun(
                    article_url, entries, idx, index_url, sql_name,
                    max_date_time, bo)
            else:
                print('other')
#---------------------------------------------------------------------------------
# Dedimania API client initialization (original comment was garbled mojibake).
def __init__(self, instance, server_login, dedi_code, path, pack_mask, server_version, server_build, game='TM2'):
    """
    Initiate dedi api.

    :param instance: ControllerInstance
    :param server_login: .
    :param dedi_code: .
    :param path: .
    :param pack_mask: .
    :param server_version: .
    :param server_build: .
    :param game: Game info
    :type instance: pyplanet.core.instance.Instance
    """
    # Controller wiring.
    self.instance = instance
    self.loop = instance.loop

    # Server / Dedimania identity passed straight through from the caller.
    self.server_login = server_login
    self.dedimania_code = dedi_code
    self.path = path
    self.pack_mask = pack_mask
    self.server_version = server_version
    self.server_build = server_build
    self.game = game

    # One shared HTTP session plus the headers sent on every request.
    self.client = requests.session()
    self.headers = {
        'User-Agent': 'PyPlanet/{}'.format(version),
        'Accept': 'text/xml',
        'Accept-Encoding': 'gzip',
        'Content-Type': 'text/xml; charset=UTF-8',
        'Content-Encoding': 'gzip',
        'Keep-Alive': 'timeout=600, max=2000',
        'Connection': 'Keep-Alive',
    }

    # Mutable runtime state: background update task, auth session, retries.
    self.update_task = None
    self.session_id = None
    self.retries = 0
def create_google_session(self):
    """Authenticate against Google via the web login form.

    Fetches the account login page, copies all hidden form inputs into the
    payload, adds the stored credentials and the follow-up 'continue' URL
    from the X-Auto-Login header, then posts the form so the session picks
    up the authentication cookies.

    Returns:
        requests.Session: session carrying Google's auth cookies.
    """
    session = requests.session()
    login_html = session.get(DataManagement.__GOOGLE_ACCOUNT_URL)
    # Check cookies returned because there is an issue with the authentication.
    # GAPS, GALX, NID - these cookies identify the user for Google+ features;
    # GAPS is still provided.
    self.logger.debug(session.cookies.get_dict().keys())
    try:
        galx = session.cookies['GALX']
    except KeyError:
        # Fix: was a bare `except:` that also swallowed SystemExit and
        # KeyboardInterrupt; only a missing cookie is expected here.
        self.logger.error('No cookie GALX')
    # Copy every hidden input of the login form into the POST payload.
    soup_login = BeautifulSoup(login_html.content, 'html.parser').find('form').find_all('input')
    payload = {}
    for u in soup_login:
        if u.has_attr('value'):
            payload[u['name']] = u['value']
    payload['Email'] = self.__username
    payload['Passwd'] = self.__password
    # The X-Auto-Login header carries the double-encoded continue URL.
    auto = login_html.headers.get('X-Auto-Login')
    follow_up = unquote(unquote(auto)).split('continue=')[-1]
    # Commented as suggested in https://github.com/tracek/gee_asset_manager/issues/36
    # galx = login_html.cookies['GALX']
    payload['continue'] = follow_up
    # Commented as suggested in https://github.com/tracek/gee_asset_manager/issues/36
    # payload['GALX'] = galx
    session.post(DataManagement.__AUTHENTICATION_URL, data=payload)
    return session
def __upload_image(self, file_path, session, upload_url, image_name, properties, nodata):
    """Upload one image file to Earth Engine and start its ingestion task.

    Args:
        file_path: local path of the image file to upload.
        session: authenticated requests session used for the upload POST.
        upload_url: GEE upload endpoint.
        image_name: target asset id for the ingested image.
        properties: asset properties dict forwarded to the ingestion request.
        nodata: pixel value to be treated as missing data.
    """
    # POST the raw file; the response body is a JSON list whose first
    # element is the Google storage id of the uploaded blob.
    with open(file_path, 'rb') as source:
        upload_response = session.post(upload_url, files={'file': source})
    gsid = upload_response.json()[0]

    # Ingestion manifest: single tileset sourced from the uploaded blob.
    asset_data = {
        "id": image_name,
        "tilesets": [
            {"sources": [
                {"primaryPath": gsid,
                 "additionalPaths": []}
            ]}
        ],
        "bands": [],
        "properties": properties,
        "missingData": {"value": nodata},
    }
    task_id = ee.data.newTaskId(1)[0]
    _ = ee.data.startIngestion(task_id, asset_data)
def data_management(self, session, upload_url, assets_names, file_path, properties, nodata):
    """Upload file_path to GEE, replacing any existing asset of the same name.

    Args:
        session: authenticated requests session used for the upload.
        upload_url: GEE upload endpoint.
        assets_names: names already present in the target collection.
        file_path: local path of the image file to upload.
        properties: asset properties forwarded to the ingestion request.
        nodata: pixel value to be treated as missing data.
    """
    file_root = file_path.split("/")[-1].split(".")[0]
    image_name = self.asset_path + '/%s' % file_root
    already_uploaded = file_root in assets_names
    if already_uploaded:
        # An asset with this name already exists; uploading again without
        # deleting first would raise an error on the GEE side.
        self.logger.error("%s already in collection" % file_root)

    if not os.path.exists(file_path):
        # Fix: previously a missing local file fell through to the
        # delete-and-re-upload branch and crashed inside __upload_image's
        # open() call; skip it explicitly instead.
        self.logger.error("%s not found on disk - skipping" % file_path)
    elif not already_uploaded:
        self.__upload_image(file_path, session, upload_url, image_name, properties, nodata)
    else:
        self.logger.debug('%s already uploaded in GEE - Deleting old file' % file_root)
        self.__delete_image(image_name)
        self.__upload_image(file_path, session, upload_url, image_name, properties, nodata)
def __init__(self):
self.inventory = defaultdict(list) # A list of groups and the hosts in that group
self.cache = dict() # Details about hosts in the inventory
self.params = dict() # Params of each host
self.facts = dict() # Facts of each host
self.hostgroups = dict() # host groups
self.session = None # Requests session
self.config_paths = [
"/etc/ansible/foreman.ini",
os.path.dirname(os.path.realpath(__file__)) + '/foreman.ini',
]
env_value = os.environ.get('FOREMAN_INI_PATH')
if env_value is not None:
self.config_paths.append(os.path.expanduser(os.path.expandvars(env_value)))