def run_script(script, remote=True):
''' Run a script. '''
custom_scripts = _get_config()['scripts']
# Raise an error if the script is not defined.
if not is_script_defined(script):
raise RuntimeError('Missing script "{}"'.format(script))
# Get the command defined in the script.
script_cmd = custom_scripts[script]
info_text = 'Running {}\n{}'.format(
cyan(script), cyan('> ' + script_cmd)
)
host_info(info_text, remote=remote)
# Run a custom script defined in the config.
with hide('running'):
run(script_cmd, remote)
Python hide() example source code
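All of the snippets collected below use Fabric 1.x's hide() context manager to silence selected output groups while commands run. A minimal, self-contained sketch of the pattern (the host and function name are illustrative):

from fabric.api import env, hide, run, settings

env.host_string = 'deploy@example.com'  # illustrative host

def quiet_uptime():
    # 'running' hides the "[host] run: ..." echo, 'stdout' hides command output;
    # hide('everything') would silence all groups, including warnings and aborts.
    with settings(hide('running', 'stdout'), warn_only=True):
        return run('uptime')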
def install_jenkins(*args, **kwargs):
home = run('echo $HOME')
version = kwargs.get('version', 'latest')
init = os.path.join(home,'init')
jenkins_base_dir = os.path.join(home, 'jenkins')
jenkins_init = os.path.join(init, 'jenkins')
port = kwargs.get('port')
if not exists(jenkins_base_dir):
run('mkdir ' + jenkins_base_dir)
if not exists(os.path.join(jenkins_base_dir, 'jenkins.war')):
with hide('output'):
run('wget http://mirrors.jenkins-ci.org/war/%s/jenkins.war -O ~/jenkins/jenkins.war' % version)
if not exists(os.path.join(jenkins_base_dir, 'org.jenkinsci.main.modules.sshd.SSHD.xml')):
with hide('output'):
run('wget https://templates.wservices.ch/jenkins/org.jenkinsci.main.modules.sshd.SSHD.xml -O ~/jenkins/org.jenkinsci.main.modules.sshd.SSHD.xml')
if not exists(init):
run('mkdir ~/init')
if not exists(jenkins_init):
with hide('output'):
run('wget https://templates.wservices.ch/jenkins/jenkins.init -O ~/init/jenkins')
run('chmod 750 ~/init/jenkins')
sed(jenkins_init, 'PORT=HTTP_PORT', 'PORT=%s' % port)
run('~/init/jenkins start')
else:
run('~/init/jenkins restart')
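A hypothetical invocation of this task (the version string and port are illustrative; with Fabric 1.x it would typically be run as fab install_jenkins:version=2.346.1,port=8080):

# Hypothetical direct call; 'port' is required because the init script's
# PORT=HTTP_PORT placeholder is substituted with it via sed().
install_jenkins(version='2.346.1', port=8080)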
def load_config(conf_file, base_conf=[], spec_conf=[], delimiter=' '):
if exists(conf_file):
with hide('output'):
config_data = run('cat %s' % conf_file)
else:
config_data = ''
confs = base_conf + spec_conf
for conf in confs:
param, value = conf.split(delimiter, 1)
value = re.sub(r'#.*$', "", str(value)) # Delete comments
match = re.search('^%s[ ]?%s[ ]?(.*)' % (param, delimiter), config_data, re.MULTILINE)
if match:
orig_value = match.group(1).strip()
orig_line = '%s' % match.group(0).strip()
if orig_value != str(value):
if config_data and param in spec_conf:
continue # Do not override already existing specific configurations
print('%s %s change to %s' % (param, orig_value, value))
sed(conf_file, orig_line, '%s%s%s' % (param, delimiter, value))
else:
print('Config OK: %s%s%s' % (param, delimiter, value))
else:
print('Add config %s%s%s' % (param, delimiter, value))
append(conf_file, '%s%s%s' % (param, delimiter, value))
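Entries in base_conf and spec_conf are plain 'param<delimiter>value' strings; values already present for spec_conf parameters are never overridden. A hypothetical call (the file path and settings are illustrative):

# Hypothetical usage: enforce a shared sshd setting and seed a host-specific one.
load_config(
    '/etc/ssh/sshd_config',
    base_conf=['PermitRootLogin no'],
    spec_conf=['Port 2222'],
    delimiter=' ',
)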
def vulture():
""" try to find dead code paths """
with api.quiet():
if not api.local('which vulture').succeeded:
print 'vulture not found, installing it'
api.local('pip install vulture')
ignore_functions_grep = 'egrep -v "{0}"'.format(
'|'.join(VULTURE_IGNORE_FUNCTIONS))
excluded = ",".join(VULTURE_EXCLUDE_PATHS)
excluded_paths = (' --exclude ' + excluded) if excluded else ''
vulture_cmd = '\n vulture {pkg_name}{exclude}{pipes}'
vulture_cmd = vulture_cmd.format(
pkg_name=PKG_NAME,
exclude=excluded_paths,
pipes='|'.join(['', ignore_functions_grep]))
changedir = api.lcd(os.path.dirname(__file__))
warn_only = api.settings(warn_only=True)
be_quit = api.hide('warnings')
with contextlib.nested(changedir, warn_only, be_quit):
result = api.local(vulture_cmd, capture=True)
exit_code = result.return_code
print result.strip()
raise SystemExit(exit_code)
def list_platforms(root_dir):
"""
List the platform directories under root_dir.
"""
def is_platform(dir):
"""
A directory counts as a platform if it contains a version.lua file.
"""
with quiet():
return run('test -f "{}/{}/version.lua"'.format(root_dir, dir)).succeeded
with cd(root_dir), hide('stdout'):
result = run('''find ./ -mindepth 1 -maxdepth 1 -type d -print |grep --color=never -vE '([0-9]+(\.[0-9]+){3}\\b)|(lyServers)' ''')
dirs = [each.lstrip('./') for each in result.splitlines()]
return [each for each in dirs if is_platform(each)]
def make_diff(remote_script_dir, diff_from_lua, diff_to_lua, resource_dir, dest):
"""
Call the remote make_diff.py helper to build a resource diff package; relative paths are resolved against the directory set by an enclosing with cd(dir): block.
Example:
/app/opbak/make_diff_3/make_diff.py --resource-dir 3.6.1.0/res --diff-from 3.6.0.9/res/res.lua --diff-to 3.6.1.0/res/res.lua --dest /app/opbak/make_diff_20150909_xxxxx/3.6.1.0,/app/opbak/make_diff_20150909_xxxxx/3.6.1.0.zip
"""
with hide('running', 'stdout'):
run('''python {remote_script_dir}/make_diff.py --resource-dir {resource_dir} --diff-from {diff_from_lua} --diff-to {diff_to_lua} --dest {dest}'''.format(remote_script_dir=remote_script_dir, resource_dir=resource_dir, diff_from_lua=diff_from_lua, diff_to_lua=diff_to_lua, dest=dest))
# Generate the companion .lua metadata file (package size and md5) for the diff package.
_zipfile = dest.split(',')[0]
zipfile = _zipfile.rstrip('.zip')
zip_lua = '{}.lua'.format(zipfile)
with hide('running', 'stdout'):
file_size = run('stat --printf="%s" {}'.format(zipfile))
md5 = run("md5sum {} | awk '{{print $1}}'".format(zipfile)).strip('\n')
run('''echo -ne 'local updateZipSize = {{}}\nupdateZipSize.value = {file_size}\nupdateZipSize.md5 = "{md5}"\nreturn updateZipSize' >{zip_lua}'''.format(file_size=file_size, md5=md5, zip_lua=zip_lua))
def build(name, ask=True, **kwargs):
"""
Build the malicious mote to its target hardware.
:param name: experiment name (or absolute path to experiment)
:param ask: ask confirmation
:param path: expanded path of the experiment (dynamically filled in through 'command' decorator with 'expand')
:param kwargs: simulation keyword arguments (see the documentation for more information)
"""
def is_device_present():
with settings(hide(*HIDDEN_ALL), warn_only=True):
return local("if [ -c /dev/ttyUSB0 ]; then echo 'ok'; else echo 'nok'; fi", capture=True) == 'ok'
console = kwargs.get('console')
counter, interval = 0.0, 0.5
while not is_device_present():
sleep(interval)
counter += interval
if counter % 5 == 0:
logger.warning("Waiting for mote to be detected...")
elif counter >= 120:
logger.error("Something failed with the mote ; check that it mounts to /dev/ttyUSB0")
return
remake(name, build=True, **kwargs) if console is None else console.do_remake(name, build=True, **kwargs)
return "Mote built on /dev/ttyUSB0"
def _command(
fabric_method,
command,
ignore_errors=False,
quiet=True,
hide=('running', 'aborts'),
show=(),
abort_exception=RuntimeError,
**kwargs
):
if quiet:
hide += ('output', 'warnings')
log('{method}: {command}'.format(
method=fabric_method.__name__,
command=command,
))
with fab.settings(
fab.hide(*hide),
fab.show(*show),
abort_exception=abort_exception,
warn_only=ignore_errors,
):
return fabric_method(command, **kwargs)
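A hypothetical call, assuming the surrounding module imports Fabric's API as fab (as the body already suggests) and defines log():

# Hypothetical usage: run a command remotely, keeping warnings visible and
# tolerating a non-zero exit status.
result = _command(fab.run, 'systemctl is-active nginx',
                  ignore_errors=True, quiet=False)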
def create_db():
with settings(warn_only=True), hide('output', 'running'):
if env.get('settings'):
execute('servers.stop_service', 'uwsgi')
with shell_env(**app_config.database):
local('dropdb --if-exists %s' % app_config.database['PGDATABASE'])
if not env.get('settings'):
local('psql -c "DROP USER IF EXISTS %s;"' % app_config.database['PGUSER'])
local('psql -c "CREATE USER %s WITH SUPERUSER PASSWORD \'%s\';"' % (app_config.database['PGUSER'], app_config.database['PGPASSWORD']))
with shell_env(**app_config.database):
local('createdb %s' % app_config.database['PGDATABASE'])
if env.get('settings'):
execute('servers.start_service', 'uwsgi')
# Source: gobgp_monitoring_worker.py, project sdwan_orchestrator_with_gobgp (author: ttsubo)
def _monitor_neighbor(self):
with hide('running', 'stdout'):
while True:
eventlet.sleep(1)
try:
results = {}
cmd = 'gobgp -j neighbor -u {0}'.format(self.mgmt_addr)
output = local(cmd, capture=True)
ret = json.loads(output)
for i in range(len(ret)):
addr = ret[i]['conf']['remote_ip']
state = ret[i]['info']['bgp_state']
results[addr] = state
change_result_list = self._extract_change_state(results)
if change_result_list != []:
result_queue.put(change_result_list)
except:
continue
def request_spot_instances(
price=0.01, zone='us-west-2a', inst_type=INSTANCE_TYPE,
inst_id="ami-9abea4fb"):
' request spot instances '
# ami-9abea4fb - ubuntu-trusty-14.04-amd64-server
launch_specification = '''
{{
"ImageId": "{}",
"KeyName": "ubuntu_trusty",
"SecurityGroupIds": [ "sg-94a671f3" ],
"InstanceType": "{}"
}}
'''.format(inst_id, inst_type)
ls_encode = launch_specification.replace('\n', '')
ls_encode = ls_encode.replace('"', '\\"')
aws_cmd = 'aws ec2 request-spot-instances --spot-price "{}" --instance-count 1 --type "one-time" --availability-zone-group {} --launch-specification "{}"'
aws_cmd = aws_cmd.format(price, zone, ls_encode)
jq_cmd = 'jq -c -C "."'
with hide("running"):
local('|'.join([aws_cmd, jq_cmd]))
def tar_archive(name, path, remote=True):
''' Compress the source path into a tar archive. '''
cmd = 'tar -czvf {} {}'.format(name, path)
with hide('stdout'):
runner.run(cmd, remote=remote)
def glob(path, remote=True):
''' Glob a directory path to get the list of files. '''
with hide('everything'):
return runner.run('ls -1 {}'.format(path), remote=remote).split()
def load_history():
''' Load build history. '''
with hide('everything'):
data = fs.read_remote_file(get_builds_file())
return json.loads(data)
def check():
''' Check the current remote branch and the last commit. '''
with hide('running'):
# Show the current branch
remote_branch = git.current_branch()
remote_print('Branch: {}'.format(remote_branch))
# Show the last commit
git.show_last_commit()
def last_commit(remote=True, short=False):
'''
Get the last commit of the git repository.
Note: This assumes the current working directory (on remote or local host)
to be a git repository. So, make sure current directory is set before using this.
'''
cmd = 'git rev-parse{}HEAD'.format(' --short ' if short else ' ')
with hide('everything'):
result = run(cmd) if remote else local(cmd, capture=True)
return result.strip()
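Because the helper assumes the current directory is already a git repository, callers normally pin the directory first. A hypothetical remote call (the path is illustrative; cd is Fabric's directory context manager):

# Hypothetical usage: read the short SHA of the deployed revision.
with cd('/srv/app/current'):
    sha = last_commit(remote=True, short=True)
print('Deployed commit: {}'.format(sha))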
def host_print(msg, remote=True, leading_chars='\n'):
''' Print a raw message on the host. '''
cmd = 'echo "{0}{1}"'.format(leading_chars, msg)
with hide('running'):
if remote:
_run(cmd)
else:
_local(cmd)
def stop_mysql(self):
with settings(hide('running', 'stdout')):
result = local('service mysql stop')
return result.return_code == 0, "stop_mysql"
def start_mysql(self):
with settings(hide('running', 'stdout')):
return local('service mysql start --skip-slave-start').return_code == 0, "start_mysql"
def failover(self, *args, **kwargs):
cred_file = self.config.get('failover_creds', '/etc/mysql/failover.cnf')
master = kwargs.get('master_host')
if not master:
return False, "No master_host given"
with settings(hide('running')):
return local("/usr/bin/mysqlmaster.py switch --new-master {} --defaults-extra-file={} "
"--dead-master --assume-yes".format(master, cred_file)).return_code == 0, ""
def run_remote_command(host_string, command, timeout=NODE_COMMAND_TIMEOUT,
jsonresult=False,
catch_exitcodes=None):
"""Executes command on remote host via fabric run.
Optionally timeout may be specified.
If result of execution is expected in json format, then the output will
be treated as json.
"""
with settings(hide(NODE_STATUSES.running, 'warnings', 'stdout', 'stderr'),
host_string=host_string,
warn_only=True):
return execute_run(command, timeout=timeout, jsonresult=jsonresult,
catch_exitcodes=catch_exitcodes)
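A hypothetical call (the host string and command are illustrative; NODE_COMMAND_TIMEOUT, NODE_STATUSES, and execute_run come from the surrounding module):

# Hypothetical usage: check the kernel version on a node, allowing 30 seconds.
kernel = run_remote_command('root@10.0.0.12', 'uname -r', timeout=30)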
def compare_tar_against_git():
"""
Compare the contents of the tarball against git ls-files
"""
with hide("commands"):
with cd("/home/vagrant/repos/sympy"):
git_lsfiles = set([i.strip() for i in run("git ls-files").split("\n")])
tar_output_orig = set(show_files('source', print_=False).split("\n"))
tar_output = set()
for file in tar_output_orig:
# The tar files are like sympy-0.7.3/sympy/__init__.py, and the git
# files are like sympy/__init__.py.
split_path = full_path_split(file)
if split_path[-1]:
# Exclude directories, as git ls-files does not include them
tar_output.add(os.path.join(*split_path[1:]))
# print tar_output
# print git_lsfiles
fail = False
print()
print(blue("Files in the tarball from git that should not be there:",
bold=True))
print()
for line in sorted(tar_output.intersection(git_whitelist)):
fail = True
print(line)
print()
print(blue("Files in git but not in the tarball:", bold=True))
print()
for line in sorted(git_lsfiles - tar_output - git_whitelist):
fail = True
print(line)
print()
print(blue("Files in the tarball but not in git:", bold=True))
print()
for line in sorted(tar_output - git_lsfiles - tarball_whitelist):
fail = True
print(line)
if fail:
error("Non-whitelisted files found or not found in the tarball")
def ssh(self, command, use_sudo=False, quiet=False, **kwargs):
"""
Executes a shell command through ssh
:param command: The command to be executed
:param use_sudo: If True, it runs as sudo
:param quiet: Whether to hide the stdout/stderr output or not
:return: The fabric equivalent of run and sudo
:raise: SshError: If the command fails
"""
if self._vm_object:
self._wait_for_ssh_service(
kwargs['vcdriver_vm_ssh_username'],
kwargs['vcdriver_vm_ssh_password']
)
with fabric_context(
self.ip(),
kwargs['vcdriver_vm_ssh_username'],
kwargs['vcdriver_vm_ssh_password']
):
if use_sudo:
runner = sudo
else:
runner = run
if quiet:
with hide('everything'):
result = runner(command)
else:
result = runner(command)
if result.failed:
raise SshError(command, result.return_code, result.stdout)
return result
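A hypothetical call, assuming vm is an instance of the virtual-machine class this method belongs to and that the credentials shown are placeholders:

# Hypothetical usage: refresh package lists quietly, as root, over SSH.
result = vm.ssh(
    'apt-get update',
    use_sudo=True,
    quiet=True,
    vcdriver_vm_ssh_username='deploy',
    vcdriver_vm_ssh_password='not-a-real-password',
)
print(result.return_code)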
def winrm(self, script, winrm_kwargs=dict(), quiet=False, **kwargs):
"""
Executes a remote windows powershell script
:param script: A string with the powershell script
:param winrm_kwargs: The pywinrm Protocol class kwargs
:param quiet: Whether to hide the stdout/stderr output or not
:return: A tuple with the status code, the stdout and the stderr
:raise: WinRmError: If the command fails
"""
if self._vm_object:
self._wait_for_winrm_service(
kwargs['vcdriver_vm_winrm_username'],
kwargs['vcdriver_vm_winrm_password'],
**winrm_kwargs
)
winrm_session = self._open_winrm_session(
kwargs['vcdriver_vm_winrm_username'],
kwargs['vcdriver_vm_winrm_password'],
winrm_kwargs
)
if not quiet:
print('Executing remotely on {} ...'.format(self.ip()))
styled_print(Style.DIM)(script)
status, stdout, stderr = self._run_winrm_ps(winrm_session, script)
if not quiet:
styled_print(Style.BRIGHT)('CODE: {}'.format(status))
styled_print(Fore.GREEN)(stdout)
if status != 0:
if not quiet:
styled_print(Fore.RED)(stderr)
raise WinRmError(script, status, stdout, stderr)
else:
return status, stdout, stderr
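And a hypothetical PowerShell call against the same kind of object (credentials are placeholders):

# Hypothetical usage: list running Windows services over WinRM.
status, stdout, stderr = vm.winrm(
    'Get-Service | Where-Object { $_.Status -eq "Running" }',
    quiet=True,
    vcdriver_vm_winrm_username='Administrator',
    vcdriver_vm_winrm_password='not-a-real-password',
)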
def list_inner_scopes(root_dir, version):
with cd('{}/{}'.format(root_dir, version)), hide('running', 'stdout'):
result = run('''find ./ -mindepth 1 -maxdepth 1 -type d -print''')
return [each.lstrip('./') for each in result.splitlines()]
def list_existed_versions(root_dir):
"""
List the existing version directories (named like 1.2.3.4) under root_dir.
"""
with cd(root_dir), hide('running', 'stdout'):
result = run('''( find ./ -mindepth 1 -maxdepth 1 -type d -print |grep --color=never -E '[0-9]+(\.[0-9]+){3}\\b' ) || echo "no_version_found"''')
if result == "no_version_found":
return []
else:
return [each.lstrip('./') for each in result.splitlines()]
def list_existed_diff_packages(version_dir):
with cd(version_dir), hide('running', 'stdout'):
result = run('''( find ./ -mindepth 1 -maxdepth 1 -type f -print |grep --color=never -E '[0-9]+(\.[0-9]+){3}$' ) || echo "no_diff_package_found"''')
if result == "no_diff_package_found":
return []
else:
return [each.lstrip('./') for each in result.splitlines()]
def check_local_merge_scripts(local_dir):
scripts = ['clear_small_user.sql', 'db.yml', 'forceId.sql', 'hf.py', 'hf_reward.sql', 'table.yml']
with settings(hide('everything')):
with lcd(local_dir):
for each_file in scripts:
local('test -f {}'.format(each_file))
for replace_str in ['first_force_id', 'second_force_id', 'third_force_id']:
local('grep {} forceId.sql >/dev/null'.format(replace_str))
local('grep "db: db1" db.yml')
local('grep "db: db2" db.yml')
def rsync_to_backup(game, region):
print("??????????????...")
sys.stdout.flush()
time.sleep(20)
config = ConfigReader(game, region)
rsync_module = config.get("rsync_module")
rsync_root = config.get("rsync_root")
rsync_backup_ip = config.get("rsync_backup_ip")
if rsync_module == "" or rsync_root == "" or rsync_backup_ip == "":
raise Exception('rsync config is not proper in the game config file')
with cd(rsync_root), settings(user='root'), hide("stdout"):
run('''rsync -art -R --delete --out-format="%n" --password-file=/etc/rsyncd.secret ./ {}::{}'''.format(rsync_backup_ip, rsync_module))