def preflight_checks():
    logger.info('checking aws credentials and region')
    if region() is None:
        logger.error('Region is not set up. please run aws configure')
        return False
    try:
        check_aws_credentials()
    except AttributeError:
        logger.error('AWS credentials not found. please run aws configure')
        return False
    logger.info('testing redis')
    try:
        from gimel import _redis
        _redis().ping()
    except redis.exceptions.ConnectionError:
        logger.error('Redis ping failed. Please run gimel configure')
        return False
    return True
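A minimal usage sketch (the `deploy` entry point below is hypothetical, not part of the snippet above): run the checks first and abort early so misconfiguration fails fast.
def deploy_if_ready():
    # Gate the deploy on the preflight result; a False return has already logged why.
    if not preflight_checks():
        raise SystemExit(1)
    deploy()  # hypothetical deploy entry point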
def get_commit_log(self):
    """Get the current commit log
    """
    try:
        log_object = {}
        for key, value in COMMIT_KEYS.items():
            stdout, _rc = helpers.run(['git', 'log', '-1', '--pretty=\'%s\'' % value],
                                      self.paths['repo_path'],
                                      self.dryrun)
            output = "XXXXX" if self.dryrun else helpers.filter_content(stdout)
            if key in consts.RENAME_COMMIT_LOG_KEYS:
                key = consts.RENAME_COMMIT_LOG_KEYS[key]
            log_object[key] = output
        log_object['project'] = self.project
        log_object['reponame'] = self.reponame
        return log_object
    except Exception as e:
        logger.errorout("get_commit_log", error="Problem getting commit log",
                        error_msg=e.message, track=self.track)
def get_template_content(path):
    """Read either yml or json files and store them as dict"""
    template_dict = {}
    _filename, file_extension = os.path.splitext(path)
    file_extension = file_extension.replace('.', '')
    if file_extension in consts.TEMPLATING_EXTS:
        try:
            template_content = {}
            abs_path = os.path.abspath(os.path.expandvars(path))
            with open(abs_path, 'r') as stream:
                if file_extension in consts.JSON_EXTS:
                    template_content = json.load(stream)  # nosec
                elif file_extension in consts.YMAL_EXTS:
                    template_content = yaml.safe_load(stream)  # nosec
            template_dict.update(template_content)
        except Exception as e:
            logger.errorout("Error reading templating file",
                            file=path, error=e.message)
    else:
        logger.errorout("No templating file found",
                        file=path)
    return template_dict
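A short usage sketch, assuming `consts.TEMPLATING_EXTS` covers the `yml` and `json` extensions used below; later files override earlier keys via `dict.update`.
# Merge base values with environment-specific overrides (paths are examples).
values = get_template_content('templates/base.yml')
values.update(get_template_content('templates/prod.json'))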
def run(cmd, working_dir=None, dry_run=False):
    """Runs local cmd command"""
    cmd_split = shlex.split(cmd) if isinstance(cmd, basestring) else cmd
    if dry_run:
        return " ".join(cmd_split), 0
    try:
        p = Popen(cmd_split, shell=False, stderr=STDOUT, stdout=PIPE, cwd=working_dir)
        communicate = p.communicate()
        return communicate[0].strip(), p.returncode
    except OSError as e:
        logger.errorout("Run OSError", error=e.message)
    except:  # pylint: disable=bare-except
        logger.errorout("Run Error")
    return
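Callers get back a `(stdout, returncode)` pair, so error handling stays at the call site; a sketch (the path is an example):
out, rc = run("git status --porcelain", working_dir="/tmp/repo")
if rc != 0:
    logger.error("git status failed", error=out)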
def rollback_lambda(name, alias=LIVE):
    all_versions = _versions(name)
    live_version = _get_version(name, alias)
    try:
        live_index = all_versions.index(live_version)
        if live_index < 1:
            raise RuntimeError('Cannot find previous version')
        prev_version = all_versions[live_index - 1]
        logger.info('rolling back to version {}'.format(prev_version))
        _function_alias(name, prev_version)
    except RuntimeError as error:
        logger.error('Unable to rollback. {}'.format(repr(error)))
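A usage sketch; the function name and alias below are examples, and `LIVE` is whatever default alias the module defines.
rollback_lambda('image-resizer')           # roll the default (LIVE) alias back one version
rollback_lambda('image-resizer', 'STAGE')  # hypothetical secondary alias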
def pull_repo(self, force=False):
    """Clone repo to specified dir. Delete repo if it currently exists unless reuse.
    """
    try:
        helpers.create_path(self.paths['absolute_path'], True)
        if force:
            self.delete_repo()
        if not os.path.exists(self.paths['repo_path']):
            logger.info("Starting Repo Cloning", track=self.track)
            output, rc = helpers.run(
                "git clone -b %s %s" % (self.branch, self.url),
                self.paths['absolute_path'],
                self.dryrun)
            if rc > 0:
                self.delete_repo()
                logger.error("Pulling_repo", error=output, path=self.paths['repo_path'])
                return -1
            return 1
        else:
            return 0
    except Exception as e:
        logger.errorout("Pulling_repo", err_msg=e.message,
                        error="Error pulling repo", path=self.paths['repo_path'])
def set_commit_id(self, commit_id=None):
    """Checks out the commit id for the repo
    """
    checkout_id = commit_id if commit_id else self.branch
    # Already checked out
    if self.prev_commit == checkout_id:
        return True
    cmd = "git checkout {0}".format(checkout_id)
    output, rc = self.run_command(cmd)
    if rc > 0:
        # Corrupted checkout state, try to recover
        logger.warn("Possible corrupted checkout state", desc="Problem with checkout", error=output,
                    commit_id=checkout_id, path=self.paths['repo_path'],
                    cmd=cmd, track=self.track)
        # Want to guarantee that the branch is completely reset.
        git_reset_output, rc = self.run_command("git reset --hard {0}".format(checkout_id))  # pylint: disable=unused-variable
        if rc < 1:
            # Clean up git so there are no untracked files.
            self.run_command("git clean -fd")
        if rc > 0:
            logger.errorout("set_commit_id", desc="Problem setting commit id", error=output,
                            commit_id=checkout_id, path=self.paths['repo_path'],
                            cmd=cmd, track=self.track)
    self.prev_commit = checkout_id
    return True
def function_importer(mod_str):  # pylint: disable=too-complex
    """Import Module from external source"""
    mod_split = mod_str.split(":")
    if len(mod_split) != 2:
        logger.error("Can not import function", mod=mod_str)
        return None
    mod_path = mod_split[0]
    funct_name = mod_split[1].split('.')
    path, filename = os.path.split(mod_path)
    mod_name, ext = os.path.splitext(filename)  # pylint: disable=unused-variable
    mod = None
    # try to load the precompiled module first if it exists
    if os.path.exists(os.path.join(path, mod_name) + '.pyc'):
        try:
            mod = imp.load_compiled(mod_name, mod_path)
        except:  # pylint: disable=bare-except
            pass
    if os.path.exists(os.path.join(path, mod_name) + '.py'):
        try:
            mod = imp.load_source(mod_name, mod_path)
        except Exception as e:
            logger.error("No Class to import", mod=mod_str, error=e.message)
    # Pull function if embedded in classes
    for i, mod_part in enumerate(funct_name):
        if mod and hasattr(mod, mod_part):
            if i == len(funct_name) - 1:
                if len(funct_name) > 1:
                    return getattr(mod(), mod_part)
                return getattr(mod, mod_part)
            mod = getattr(mod, mod_part)
    logger.error("Function not valid/callable", mod=mod_str)
    return None
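The expected spec is `<file path>:<dotted attribute path>`; a hedged sketch (paths and names are invented examples):
validate = function_importer("/opt/hooks/checks.py:validate")         # module-level function
method = function_importer("/opt/hooks/checks.py:Checker.validate")   # method resolved via an instance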
def cmd_check(cmd):
    """Basic check for redirection in command"""
    try:
        results = next((False for param in shlex.split(cmd)
                        for rparam in REDIRECT_COMMANDS
                        if rparam == param), True)
        if not results:
            logger.warning("Possible injection", cmd=cmd)
    except Exception as error:
        logger.warning("Possible injection/weirdness", cmd=cmd, error=error.message)
def template_directory(app_path, templating_values):
    """Template files
    Walks through all the files in a directory and templates any jinja2 values
    found.
    """
    if not check_path(app_path):
        logger.errorout("Can not copy location that does not exist",
                        path=app_path)
    tvalues = merge_templates(templating_values)
    for path, _dir, files in os.walk(app_path):
        # sort files so logs read better and it is easier to track status
        files.sort()
        j2_env = Environment(autoescape=True, loader=FileSystemLoader(path))
        for filename in files:
            # Should not template the version file since it may have
            # regex commands that can break templating.
            if filename.startswith(consts.VERSIONS_FILENAME):
                continue
            file_path = os.path.join(path, filename)
            try:
                file_content = j2_env.get_template(filename).render(tvalues)
                with open(file_path, 'w') as f:
                    f.write(file_content)
            except Exception as e:
                logger.errorout('Error templating file', file=file_path, error=e.message)
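A usage sketch; the list-of-dicts shape passed to `merge_templates` is a guess from the call above, and the values are examples.
template_directory('build/app', [{'env': 'prod'}, {'region': 'us-east-1'}])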
def __exit__(self, type, value, tb):  # pylint: disable=redefined-builtin
    """Exit RunSingleInstance class
    :return: None
    """
    try:
        if not self.__is_running:
            fcntl.lockf(self.__filelock, fcntl.LOCK_UN)
            self.__filelock.close()
            os.unlink(self.__lockfile)
    except Exception as err:
        logger.error("Error unlocking single instance file", error=err.message)
def get_std_error_from_channel(channel):
    """Get std Error from an existing channel"""
    stderr = ""
    # Make sure we read everything off the error buffer
    if channel.recv_stderr_ready():
        error_buff = channel.recv_stderr(1024)
        while error_buff:
            stderr += error_buff
            error_buff = channel.recv_stderr(1024)
    return stderr
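The `channel` argument looks like a paramiko `Channel`; a sketch under that assumption:
chan = transport.open_session()       # transport: an established paramiko Transport
chan.exec_command('ls /nonexistent')  # any command that writes to stderr
stderr_text = get_std_error_from_channel(chan)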
def _error_check(err_msg, remote_file, hostname, function_name):
    """Generic error checker for communication"""
    if len(err_msg) > 0:
        error_msg = next((err for err in ERROR_MESSAGES if err in err_msg), "Communication Error")
        logger.error(error_msg,
                     function=function_name,
                     filename=remote_file,
                     hostname=hostname,
                     module=COMMAND_MODULE_CUSTOM)
def download_channel(channel_name):
    logger.info("tvalacarta.core.updater download_channel('" + channel_name + "')")
    # Remote channel
    remote_channel_url, remote_version_url = get_channel_remote_url(channel_name)
    # Local channel
    local_channel_path, local_version_path, local_compiled_path = get_channel_local_path(channel_name)
    # Download the channel
    updated_channel_data = scrapertools.cachePage(remote_channel_url)
    try:
        outfile = open(local_channel_path, "w")
        outfile.write(updated_channel_data)
        outfile.flush()
        outfile.close()
        logger.info("tvalacarta.core.updater Grabado a " + local_channel_path)
    except:
        logger.info("tvalacarta.core.updater Error al grabar " + local_channel_path)
        import sys
        for line in sys.exc_info():
            logger.error("%s" % line)
    # Download the version file (it may not exist)
    try:
        updated_version_data = scrapertools.cachePage(remote_version_url)
        outfile = open(local_version_path, "w")
        outfile.write(updated_version_data)
        outfile.flush()
        outfile.close()
        logger.info("tvalacarta.core.updater Grabado a " + local_version_path)
    except:
        import sys
        for line in sys.exc_info():
            logger.error("%s" % line)
    if os.path.exists(local_compiled_path):
        os.remove(local_compiled_path)
def getSiteCachePath(url):
    # Get the main domain from the URL
    dominio = urlparse.urlparse(url)[1]
    logger.debug("[scrapertools.py] dominio=" + dominio)
    nombres = dominio.split(".")
    if len(nombres) > 1:
        dominio = nombres[len(nombres) - 2] + "." + nombres[len(nombres) - 1]
    else:
        dominio = nombres[0]
    logger.debug("[scrapertools.py] dominio=" + dominio)
    # Create a cache directory for URLs on this domain
    siteCachePath = os.path.join(CACHE_PATH, dominio)
    if not os.path.exists(CACHE_PATH):
        try:
            os.mkdir(CACHE_PATH)
        except:
            logger.error("[scrapertools.py] Error al crear directorio " + CACHE_PATH)
    if not os.path.exists(siteCachePath):
        try:
            os.mkdir(siteCachePath)
        except:
            logger.error("[scrapertools.py] Error al crear directorio " + siteCachePath)
    logger.debug("[scrapertools.py] siteCachePath=" + siteCachePath)
    return siteCachePath
def verify(msg, compare_object):
    '''Run a comparison; on failure log an error (instead of raising an exception) and return False.
    :param msg: message to log when the comparison fails
    :param compare_object: comparison object
    :type compare_object: By.CompareBase
    :return: True or False
    '''
    if compare_object.compare() != True:
        logger.error(msg, extra={'actual': compare_object.Actual, 'expect': compare_object.Expect})
        return False
    else:
        return True
def verifyTrue(message, actual):
    '''Verify that the actual value is True.
    '''
    if not isinstance(actual, bool):
        raise TypeError("actual type %s is not a bool" % type(actual))
    if actual != True:
        logger.error(message, extra={'actual': actual, 'expect': True})
        return False
    return True
def verifyTrueWait(message, actualfunc, actargs, timeout=10, interval=0.5):
    '''Call actualfunc every interval seconds until it returns True; log an error if it still has not by timeout.
    :param message: message to log on failure
    :param actualfunc: function that produces the actual value
    :param actargs: arguments to pass to actualfunc
    :param timeout: timeout in seconds
    :param interval: polling interval in seconds
    '''
    result = _waitForCompareResult(actualfunc, actargs, True, timeout, interval)
    if result[0] == False:
        logger.error("%s [timeout: waited %d s, compared %d times]" % (message, timeout, result[1]),
                     extra={'actual': result[2], 'expect': True})
def verifyEqual(message, actual, expect):
    '''Verify that the actual value equals the expected value; log an error on mismatch.
    :param message: message to log on failure
    :param actual: actual value
    :param expect: expected value
    :return: True or False
    '''
    if actual != expect:
        logger.error(message, extra={'actual': actual, 'expect': expect})
        return False
    return True
def verifyEqualWait(message, actualfunc, actargs, expect, timeout=10, interval=0.5):
    '''Call actualfunc every interval seconds until its result equals expect; log an error if it still differs at timeout.
    :param message: message to log on failure
    :param actualfunc: function that produces the actual value
    :param actargs: arguments to pass to actualfunc
    :param expect: expected value
    :param timeout: timeout in seconds
    :param interval: polling interval in seconds
    '''
    result = _waitForCompareResult(actualfunc, actargs, expect, timeout, interval)
    if result[0] == False:
        logger.error("%s [timeout: waited %d s, compared %d times]" % (message, timeout, result[1]),
                     extra={'actual': result[2], 'expect': expect})
def verifyMatchWait(message, actualfunc, actargs, regexpect, timeout=10, interval=0.5):
    '''Call actualfunc every interval seconds until its result matches the regular expression regexpect; log an error if it never matches before timeout.
    :param message: message to log on failure
    :param actualfunc: function that produces the actual value
    :param actargs: arguments to pass to actualfunc
    :param regexpect: regular expression the actual value should match
    :param timeout: timeout in seconds
    :param interval: polling interval in seconds
    :return: True or False
    '''
    compareobj = lambda x: re.search(regexpect, x) != None
    result = _waitForCompareResult(actualfunc, actargs, compareobj, timeout, interval)
    if result[0] == False:
        logger.error("%s [timeout: waited %d s, compared %d times]" % (message, timeout, result[1]),
                     extra={'actual': result[2], 'expect': regexpect})
def verifyCompareFunc(message, actual, comparefunc):
    '''Verify that comparefunc returns True for the actual value; log an error otherwise.
    :param message: message to log on failure
    :param actual: actual value
    :type actual: tuple or dict or a single value
    :param comparefunc: comparison function applied to the actual value; must return True or False
    :return: True or False
    '''
    actret = _getFuncResult(comparefunc, actual)
    if actret != True:
        logger.error(message, extra={'actual': actret, 'expect': True})
        return False
    return True
def verifyCompareFuncWait(message, actualfunc, actargs, comparefunc, timeout=10, interval=0.5):
    '''Call actualfunc every interval seconds and feed its result to comparefunc until that returns True; log an error if it never does before timeout.
    :param message: message to log on failure
    :param actualfunc: function that produces the actual value
    :param actargs: arguments to pass to actualfunc
    :param comparefunc: comparison function applied to the actual value; must return True or False
    :param timeout: timeout in seconds
    :param interval: polling interval in seconds
    '''
    result = _waitForCompareResult(actualfunc, actargs, comparefunc, timeout, interval)
    if result[0] == False:
        logger.error("%s [timeout: waited %d s, compared %d times]" % (message, timeout, result[1]),
                     extra={'actual': result[2], 'expect': True})
def verifyPropertyWait(message, obj, prop_name, expect, timeout=10, interval=0.5):
    '''Check obj.prop_name every interval seconds until it equals expect; log an error if it still differs at timeout.
    :param message: message to log on failure
    :param obj: object to inspect
    :type prop_name: string
    :param prop_name: name of the property to read from obj
    :param expect: expected property value
    :param timeout: timeout in seconds
    :param interval: polling interval in seconds
    '''
    result = _waitForCompareResult(_getObjProperty, {'obj': obj, 'prop_name': prop_name}, expect, timeout, interval)
    if result[0] == False:
        logger.error("%s [timeout: waited %d s, compared %d times]" % (message, timeout, result[1]),
                     extra={'actual': result[2], 'expect': expect})
def load_json(*args, **kwargs):
    if "object_hook" not in kwargs:
        kwargs["object_hook"] = to_utf8
    try:
        value = json.loads(*args, **kwargs)
    except:
        logger.error("**NO** se ha podido cargar el JSON")
        logger.error(traceback.format_exc())
        value = {}
    return value
def dump_json(*args, **kwargs):
    if not kwargs:
        kwargs = {"indent": 4, "skipkeys": True, "sort_keys": True, "ensure_ascii": False}
    try:
        value = json.dumps(*args, **kwargs)
    except:
        logger.error("**NO** se ha podido generar el JSON")
        logger.error(traceback.format_exc())
        value = ""
    return value
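These wrappers never raise; they log and fall back to `{}` / `""`, so callers can chain them directly. A sketch:
data = load_json('{"a": 1}')   # -> {'a': 1}; on bad input: {} plus an error log
text = dump_json(data)         # -> pretty-printed JSON; on failure: "" plus an error log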
def check_json_file(data, fname, dict_data):
    """
    Checks whether dict_data (the JSON file converted to a dict) is not a dictionary; if so, writes
    data to a backup file named fname.bk.
    @param data: contents of the file fname
    @type data: str
    @param fname: name of the file that was read
    @type fname: str
    @param dict_data: the parsed dictionary
    @type dict_data: dict
    """
    logger.info()
    if not dict_data:
        logger.error("Error al cargar el json del fichero %s" % fname)
        if data != "":
            # create a new backup file
            from core import filetools
            title = filetools.write("%s.bk" % fname, data)
            if title != "":
                logger.error("Ha habido un error al guardar el fichero: %s.bk" % fname)
            else:
                logger.debug("Se ha guardado una copia con el nombre: %s.bk" % fname)
        else:
            logger.debug("Está vacío el fichero: %s" % fname)
def download_channel(channel_name):
    logger.info(channel_name)
    import channeltools
    remote_channel_url, remote_version_url = channeltools.get_channel_remote_url(channel_name)
    local_channel_path, local_version_path, local_compiled_path = channeltools.get_channel_local_path(channel_name)
    # Download the channel
    try:
        updated_channel_data = scrapertools.cachePage(remote_channel_url)
        outfile = open(local_channel_path, "wb")
        outfile.write(updated_channel_data)
        outfile.flush()
        outfile.close()
        logger.info("Grabado a " + local_channel_path)
    except:
        import traceback
        logger.error(traceback.format_exc())
    # Download the version file (it may not exist)
    try:
        updated_version_data = scrapertools.cachePage(remote_version_url)
        outfile = open(local_version_path, "w")
        outfile.write(updated_version_data)
        outfile.flush()
        outfile.close()
        logger.info("Grabado a " + local_version_path)
    except:
        import traceback
        logger.error(traceback.format_exc())
    if os.path.exists(local_compiled_path):
        os.remove(local_compiled_path)
    from platformcode import platformtools
    platformtools.dialog_notification(channel_name + " actualizado", "Se ha descargado una nueva versión")
def download_channel(channel_name):
    logger.info("streamondemand-pureita.core.updater download_channel('" + channel_name + "')")
    # Remote channel
    remote_channel_url, remote_version_url = get_channel_remote_url(channel_name)
    # Local channel
    local_channel_path, local_version_path, local_compiled_path = get_channel_local_path(channel_name)
    # Download the channel
    updated_channel_data = scrapertools.cache_page(remote_channel_url)
    try:
        outfile = open(local_channel_path, "w")
        outfile.write(updated_channel_data)
        outfile.flush()
        outfile.close()
        logger.info("streamondemand-pureita.core.updater Grabado a " + local_channel_path)
    except:
        logger.info("streamondemand-pureita.core.updater Error al grabar " + local_channel_path)
        import sys
        for line in sys.exc_info():
            logger.error("%s" % line)
    # Download the version file (it may not exist)
    try:
        updated_version_data = scrapertools.cache_page(remote_version_url)
        outfile = open(local_version_path, "w")
        outfile.write(updated_version_data)
        outfile.flush()
        outfile.close()
        logger.info("streamondemand-pureita.core.updater Grabado a " + local_version_path)
    except:
        import sys
        for line in sys.exc_info():
            logger.error("%s" % line)
    if os.path.exists(local_compiled_path):
        os.remove(local_compiled_path)