def get_node_state(node):
    """Collect a health snapshot for a single cluster node.

    :param node: mapping describing the node; the 'status' and
        'hostname' keys are read here.
    :returns: dict of check-name -> result.  For a pending node only
        ``{pending: False}`` is returned; otherwise 'running', 'ssh',
        'ntp', 'services' and (when reachable) 'disk' are filled in.
        'services' holds either True or the truthy collection of
        stopped services.
    """
    status = {}
    if node.get('status') == NODE_STATUSES.pending:
        # Pending nodes are not probed at all.
        status[NODE_STATUSES.pending] = False
        return status
    hostname = node['hostname']
    status[NODE_STATUSES.running] = node.get('status') == NODE_STATUSES.running
    # Point fabric at this node for the remote commands below.
    env.host_string = hostname
    try:
        status['ntp'] = False
        status['services'] = False
        # AC-3105 Fix. Check if master can connect to node via ssh.
        if can_ssh_to_host(hostname):
            rv = run('ntpstat', quiet=True, timeout=SSH_TIMEOUT)
            if rv.succeeded:
                status['ntp'] = True
            status['ssh'] = True
            stopped = get_stopped_services(node_services, local=False)
            # A truthy 'stopped' reports the offending services;
            # otherwise the check passes with True.
            status['services'] = stopped if stopped else True
            status['disk'] = check_disk_space(local=False)
        else:
            status['ssh'] = False
    except (NetworkError, CommandTimeout):
        # Remote-command failure is reported as an SSH problem; keys
        # assigned before the failure keep their values.
        status['ssh'] = False
    return status
# Example snippets demonstrating Fabric's env.host_string usage
def set_env(config, version_tag=None):
    """Populate Fabric's ``env`` from the given configuration.

    :param config: configuration source handed to ``get_config``.
    :param version_tag: optional image version tag stored on ``env``.
    """
    # Bug: when setting this inside a function. Using host_string as workaround
    cfg = get_config(config)
    host = cfg['HOST_NAME']
    env.hosts = [host]
    env.host_string = host
    env.project_name = cfg['PROJECT_NAME']
    env.project_dir = posixpath.join('/srv/images/', env.project_name)
    env.use_ssh_config = True
    env.image_name = cfg['IMAGE'].split(':')[0]
    env.base_image_name = env.image_name + '_base'
    env.version_tag = version_tag
    env.build_dir = '/srv/build'
    env.local_path = os.path.dirname(__file__)
def set_env(config, version_tag=None):
    """Load the config and mirror its values onto Fabric's ``env``."""
    # Bug: when setting this inside a function. Using host_string as workaround
    settings_map = get_config(config)
    env.hosts = [settings_map['HOST_NAME'], ]
    env.host_string = settings_map['HOST_NAME']
    env.project_name = settings_map['PROJECT_NAME']
    env.project_dir = posixpath.join('/srv/images/', env.project_name)
    env.use_ssh_config = True
    image = settings_map['IMAGE']
    env.image_name = image.split(':')[0]
    env.base_image_name = env.image_name + '_base'
    env.version_tag = version_tag
    env.build_dir = '/srv/build'
    env.local_path = os.path.dirname(__file__)
def runs_on(target):
    """
    A decorator that picks the correct target server from the inventory
    file.
    Can be called with either target = 'hadoop_master' or 'spark_master'
    (which can be different machines)
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            if target == 'hadoop_master':
                env.host_string = hadoop_master
            elif target == 'spark_master':
                env.host_string = spark_master
            else:
                raise ValueError('Unhandled target %d' % target)
            func(*args, **kwargs)
        return wrapper
    return decorator
def upgrade_kernel_and_grub(do_reboot=False, log=True):
    """Run a non-interactive dist-upgrade (kernel included).

    :param do_reboot: reboot the host once the upgrade finishes.
    :param log: emit progress messages via ``log_yellow``.
    """
    print(env.host_string)
    if log:
        log_yellow('upgrading kernel')
    # Force-accept new config files and purge the legacy grub menu so
    # the upgrade never blocks on a prompt.
    upgrade_cmd = ('unset UCF_FORCE_CONFFOLD; '
                   'export UCF_FORCE_CONFFNEW=YES; '
                   'ucf --purge /boot/grub/menu.lst; '
                   'export DEBIAN_FRONTEND=noninteractive ; '
                   'apt-get update; '
                   'apt-get -o Dpkg::Options::="--force-confnew" --force-yes -fuy '
                   'dist-upgrade')
    with settings(hide('running', 'stdout')):
        sudo(upgrade_cmd)
    with settings(warn_only=True):
        if do_reboot:
            if log:
                log_yellow('rebooting host')
            reboot()
def tcp_port(self):
    """
    The tcp port used for the game server. Will try to get only once and save to self._tcp_port for later use.
    """
    if self._tcp_port:
        return self._tcp_port
    probe = '''grep 'name="port" type="int"' conf.xml |awk -F[\<\>] '{print $3}' '''
    with settings(host_string=self.int_ip), cd('/app/{}/backend/apps'.format(self.name)):
        output = run(probe)
    matches = output.splitlines()
    # Exactly one line must come back, otherwise the grep was ambiguous.
    if len(matches) != 1:
        raise Exception("Can't get tcp port using cmd: {}".format(probe))
    self._tcp_port = int(matches[0])
    return self._tcp_port
def dns(self):
    """
    The dns for the game server. Will try to get only once and save to self._dns for later use.

    :returns: the ``server_name`` entry of this server's nginx vhost config.
    """
    def get_dns():
        cmd = '''grep server_name %s.conf | awk '{print $2}' | tr -d ";" ''' % self.name
        # The vhost directory is a fixed path: the previous
        # ``.format(self.name)`` on a placeholder-free literal was a
        # no-op and has been removed.
        with settings(host_string=self.int_ip), cd('/app/nginx/conf/vhost'):
            result = run(cmd)
        lines = result.splitlines()
        if len(lines) == 1:
            return lines[0]
        else:
            raise Exception("Can't get dns using cmd: {}".format(cmd))
    if not self._dns:
        self._dns = get_dns()
    return self._dns
def bundle_merge(file):
    """Merge batches of source game servers into their target servers.

    ``file`` is a merge plan understood by ``parse_merge_list``, which
    yields a mapping of target server -> [(source_server, sequence), ...].
    Targets are processed in parallel (pool of 2).
    """
    merge_details = parse_merge_list(file)
    print(merge_details)
    target_servers = merge_details.keys()
    print(target_servers)

    @parallel(pool_size=2)  # at most two target servers merged concurrently
    def _bundle_merge():
        # NOTE(review): merge_details keys come from parse_merge_list,
        # while the lookup below uses a GameServer built from
        # env.host_string -- this relies on GameServer's hashing/equality
        # matching those keys; confirm against GameServer's implementation.
        target_server = GameServer(env.host_string)
        source_server_info_list = merge_details[target_server]
        for each_server, each_sequence in source_server_info_list[:-1]:
            each_source_server = GameServer(each_server)
            single_merge(each_source_server, target_server, each_sequence)
        # Last source server additionally restarts the target when its
        # merge completes (the original comment here was garbled).
        last_source_server, last_sequence = source_server_info_list[-1]
        single_merge(last_source_server, target_server, last_sequence, restart='yes')
    execute(_bundle_merge, hosts=target_servers)
    print('Done!')
def install(host_config):
    """Install Redis on the host described by ``host_config``."""
    env.host_string = helper.get_env_host_string(host_config)
    env.user = helper.get_env_user(host_config)
    env.key_filename = helper.get_env_key_filename(host_config)

    cfg = helper.get_software_config(host_config, 'redis')
    version = cfg.get('version', '3.2.6')
    port = cfg.get('port', '6379')
    data_dir = cfg.get('data-directory', '/var/lib/redis')

    # Kernel settings Redis expects before first start.
    machine.disable_transparent_huge_pages(env.host_string)
    machine.set_overcommit_memory(env.host_string, 1)

    put('{}/software/scripts/redis.sh'.format(os.getcwd()), '~/', use_sudo=True)
    sudo("chmod +x redis.sh")
    sudo(". ~/redis.sh {} {} {}".format(version, port, data_dir))
def install(host_config):
    """Install ZooKeeper (plus Java 8) on the host described by ``host_config``."""
    env.host_string = helper.get_env_host_string(host_config)
    env.user = helper.get_env_user(host_config)
    env.key_filename = helper.get_env_key_filename(host_config)

    zk_cfg = helper.get_software_config(host_config, 'zookeeper')
    java.v8_install(host_config)

    client_port = zk_cfg.get('port', '2181')
    server_id = zk_cfg.get('id', '0')
    ensemble = ",".join(zk_cfg.get('nodes'))

    put('{}/software/scripts/zookeeper.sh'.format(os.getcwd()), '~/', use_sudo=True)
    sudo("chmod +x zookeeper.sh")
    sudo(". ~/zookeeper.sh {} {} {}".format(client_port, server_id, ensemble))
def mount_ebs_volumes(host_config):
    """Format (XFS) and mount every EC2 EBS volume listed in ``host_config``.

    Rewrites /etc/fstab (previous copy kept as /etc/fstab.old) so the
    mounts persist across reboots.
    """
    env.host_string = helper.get_env_host_string(host_config)
    env.user = helper.get_env_user(host_config)
    env.key_filename = helper.get_env_key_filename(host_config)
    sudo("apt-get -y install xfsprogs")
    # Reset fstab ONCE, before the loop: doing it per-volume (as the old
    # code did) wiped the entries appended for all earlier volumes, so
    # only the last device survived a reboot.
    sudo("mv /etc/fstab /etc/fstab.old")
    sudo("touch /etc/fstab")
    for ebs in host_config['ec2-mounts']:
        device = ebs['device']
        mount = ebs['mount']
        sudo("mkdir -p {}".format(mount))
        # NOTE(review): truthiness of the fabric result object is assumed
        # to signal mkfs success here (warn_only suppresses the abort).
        if sudo('mkfs.xfs -f {0}'.format(device), warn_only=True):
            run("echo '{0}\t{1}\txfs\tdefaults\t0\t0' | sudo tee -a /etc/fstab".format(device, mount))
        sudo('sudo mount -a')
        logger.info("EBS volume {} : {} mounted.".format(device, mount))
def connect_to_instance_in_ssh(address, keypair_path, user='root'):
    """
    Run the command LS on a given instance
    :param address: ip or dns name of a machine
    :type address: str
    :param keypair_path: keypair path
    :type keypair_path: str
    """
    # Configure fabric's connection parameters in one place.
    connection_settings = {
        'host_string': address,
        'user': user,
        'parallel': False,
        'key_filename': keypair_path,
        'disable_known_hosts': True,
        'connection_attempts': 10,
        'timeout': 120,
    }
    for key, value in connection_settings.items():
        setattr(env, key, value)
    ocb.log(run('ls -la /root'), level='INFO')
def demo():
    """Wipe the remote database and reload it with demo fixtures (destructive!)."""
    confirmation = prompt(
        'Are you sure you want to DELETE ALL DATA on "{0}" and replace '
        'it with demo data? (type "I am sure" to continue):'.format(env.host_string))
    if confirmation != 'I am sure':
        abort('Aborted!')
    password = None
    while not password:
        password = prompt('Choose a password:')
    with temp():
        put('radar.sql', 'radar.sql')
        run_db('drop')
        run_db('create')
        run_db('restore radar.sql')  # Note: user must be a PostgreSQL superuser to run this
        run_fixtures('users --password {0}'.format(password))
        run_fixtures('patients --patients 95 --no-data')
        run_fixtures('patients --patients 5 --data')
def run_download_db(filename=None):
    """
    Downloads the database from the server into your local machine.
    In order to import the downloaded database, run ``fab import_db``
    Usage::
        fab prod run_download_db
        fab prod run_download_db:filename=foobar.dump
    """
    filename = filename or settings.DB_DUMP_FILENAME
    # With a key file the ssh alias from the project config is used,
    # otherwise fall back to explicit user@host.
    if env.key_filename:
        remote = settings.PROJECT_NAME
    else:
        remote = '{0}@{1}'.format(env.user, env.host_string)
    local('scp {0}:{1}{2} .'.format(
        remote, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR'), filename))
def run_download_media(filename=None):
    """
    Downloads the media dump from the server into your local machine.
    In order to import the downloaded media dump, run ``fab import_media``
    Usage::
        fab prod run_download_media
        fab prod run_download_media:filename=foobar.tar.gz
    """
    filename = filename or settings.MEDIA_DUMP_FILENAME
    # With a key file the ssh alias from the project config is used,
    # otherwise fall back to explicit user@host.
    if env.key_filename:
        remote = settings.PROJECT_NAME
    else:
        remote = '{0}@{1}'.format(env.user, env.host_string)
    local('scp {0}:{1}{2} .'.format(
        remote, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR'), filename))
def run_upload_db(filename=None):
    """
    Uploads your local database to the server.
    You can create a local dump with ``fab export_db`` first.
    In order to import the database on the server you still need to SSH into
    the server.
    Usage::
        fab prod run_upload_db
        fab prod run_upload_db:filename=foobar.dump
    """
    if not filename:
        filename = settings.DB_DUMP_FILENAME
    if env.key_filename:
        ssh = settings.PROJECT_NAME
    else:
        ssh = '{0}@{1}'.format(env.user, env.host_string)
    # Fixed format index: '{3}' referenced a fourth argument that was
    # never passed, so this call always raised IndexError.
    local('scp {0} {1}:{2}'.format(
        filename, ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')))
def _config_fabric(self):
    """Point fabric at localhost and silence every output channel."""
    env.host_string = 'localhost'
    env.warn_only = True
    # Disable all of fabric's output levels.
    for channel in output:
        output[channel] = False
def tsudo(target, cmd):
    """Run ``cmd`` via sudo on ``target`` and return fabric's result."""
    env.host_string = target
    result = sudo(cmd)
    return result
def upload_keys():
    """
    Upload the SSH public/private keys to the remote server via scp
    """
    public_key = env.ssh_keys_name + '.pub'
    scp_command = 'scp {} {}/authorized_keys {}@{}:~/.ssh'.format(
        public_key, env.ssh_keys_dir, env.user_name, env.host_string)
    local(scp_command)
def pre_start_hook(app):
    """Push the kuberdock network plugin to every node and restart the watcher.

    :param app: flask application whose context provides database access.
    """
    from ..nodes.models import Node
    # env.warn_only = True
    env.user = 'root'
    env.key_filename = SSH_KEY_FILENAME
    # Suppress fabric's chatter while looping over the nodes.
    output.stdout = False
    output.running = False
    PLUGIN_DIR = '/usr/libexec/kubernetes/kubelet-plugins/net/exec/kuberdock/'
    with app.app_context():
        for node in Node.query.all():
            env.host_string = node.hostname
            put('./node_network_plugin.sh', PLUGIN_DIR + 'kuberdock')
            put('./node_network_plugin.py', PLUGIN_DIR + 'kuberdock.py')
            run('systemctl restart kuberdock-watcher')
            # print() as a function keeps this importable under Python 3;
            # the Python-2 print statement was a syntax error there.
            print('Kuberdock node parts are updated')
def upload():
    """Upload entire project to server"""
    # Bug: when setting this inside a function. Using host_string as workaround
    run('mkdir -p /srv/images/' + env.project_name + '/')
    excluded = (
        '.git', '.gitignore', '__pycache__', '*.pyc', '.DS_Store', 'environment.yml',
        'fabfile.py', 'Makefile', '.idea', 'bower_components', 'node_modules',
        '.env.example', 'README.md', 'var',
    )
    rsync_project(env.project_dir, './', exclude=excluded, delete=True)
# Wrapper Functions:
def upload():
    """Upload entire project to server"""
    # Bug: when setting this inside a function. Using host_string as workaround
    run('mkdir -p /srv/images/{}/'.format(env.project_name))
    ignore_patterns = ('.git', '.gitignore', '__pycache__', '*.pyc', '.DS_Store',
                       'environment.yml', 'fabfile.py', 'Makefile', '.idea',
                       'bower_components', 'node_modules', '.env.example',
                       'README.md', 'var')
    rsync_project(env.project_dir, './', exclude=ignore_patterns, delete=True)
# Wrapper Functions:
def set_server_config(json_data):
    """Configure fabric's target host and credentials from a JSON payload."""
    host = json_data.get('server_ip', '127.0.0.1')
    user = json_data.get('user', getuser())
    password = json_data.get('password', '')
    env.host_string = host
    env.user = user
    env.password = password
def backup_db(db_name, db_user, password, db_server=None):
    """Dump ``db_name`` into ``<db_name>-bak.sql`` using pg_dump.

    :param db_server: host to dump from; defaults to the CURRENT
        env.host_string.  The old ``db_server=env.host_string`` default
        was evaluated once at import time and never tracked later hosts.
    """
    if db_server is None:
        db_server = env.host_string
    # NOTE(review): cd() only affects remote commands, while local()
    # runs on this machine -- the dump lands in the local CWD; confirm
    # whether lcd() was intended.  ``password`` is accepted but not
    # used by this command (pg_dump prompts or reads ~/.pgpass).
    with cd('/tmp'):
        local('pg_dump -U {user} -h {ip} {db} > {db}-bak.sql'.format(
            user=db_user,
            ip=db_server,
            db=db_name
        ))
def restore_db(db_name, db_user, password, db_server=None):
    """Load ``/tmp/<db_name>-bak.sql`` into ``db_name`` via psql.

    :param db_server: host to restore to; defaults to the CURRENT
        env.host_string.  The old ``db_server=env.host_string`` default
        was evaluated once at import time and never tracked later hosts.
    """
    if db_server is None:
        db_server = env.host_string
    # NOTE(review): ``password`` is accepted but not used by this
    # command (psql prompts or reads ~/.pgpass).
    local('psql -U {user} -h {ip} {db} < /tmp/{db}-bak.sql'.format(
        user=db_user,
        ip=db_server,
        db=db_name
    ))
def retrieve_file_names(files):
    """Validate the given file paths and return their base names.

    Raises when a file is missing on the current host or when two paths
    share a base name (the original docstring was garbled in this copy).
    """
    filenames = [os.path.basename(each) for each in files]
    for each_file in files:
        # The old code tested the function object itself (always truthy),
        # so the existence check never fired.  Call it with the path.
        # NOTE(review): assumes file_exists_check takes a path -- confirm.
        if not file_exists_check(each_file):
            raise Exception("File {} doesn't exist on {}".format(each_file, env.host_string))
    if len(set(filenames)) != len(files):
        raise Exception('Duplicate file names in the files: {}'.format(files))
    return filenames
def mkdir(self, remote_dir):
    """Create ``remote_dir`` on the server (no-op when it already exists)."""
    command = ' [ -d {0} ] || mkdir -p {0} '.format(remote_dir)
    with settings(host_string=self.int_ip):
        run(command)
def ext_ip(self):
    """
    The external ip for the game server. Will try to get only once and save to self._ext_ip for later use.
    """
    if self._ext_ip:
        return self._ext_ip
    with settings(host_string=self.int_ip):
        self._ext_ip = run('''curl -s ip.cn |awk '{split($2,x,"?");print x[2]}' ''')
    return self._ext_ip
def _operation(self, action):
    """Invoke this game server's startup.sh with the given action."""
    command = 'set -m; /app/{}/backend/bin/startup.sh {} && sleep 0.2'.format(self.name, action)
    with settings(host_string=self.int_ip):
        run(command, warn_only=True)
def sql_exec(self, sql_file):
    """Feed ``sql_file`` to the pandora CLI against this server's database."""
    command = 'pandora --update {} <{}'.format(self.name, sql_file)
    with settings(host_string=self.int_ip):
        run(command)