def connect(self, sshhost=None, user=None, pwd=None, port=None):
    """
    Connects this instance to the host sshhost over SSH.
    sshhost -- the host to connect to (defaults to the host given at construction).
    user -- the username to authenticate with.
    pwd -- the password.
    port -- the port number to connect to.
    See SSHClient.connect for more information on optional parameters
    that can be set when using the underlying layer directly instead of this one.
    """
    if sshhost is None:
        sshhost = self.sshhost
    if user is None:
        user = self.username
    if pwd is None:
        pwd = self.pwd
    if port is not None:
        self.client.connect(sshhost, username=user, password=pwd, port=port)
    else:
        self.client.connect(sshhost, username=user, password=pwd)
def run(self, command):
    """
    Runs a command over SSH on the client.
    command -- the command to execute.
    Returns the stdin, stdout, and stderr of the executed command as a 3-tuple.
    """
    (ins, out, err) = self.client.exec_command(command)
    if self.use_log:
        # Note: reading out/err here consumes the streams, so when logging is
        # enabled the caller receives already-exhausted stdout/stderr objects.
        with open("Skynet.log", 'a') as file:
            file.write("executing command: %s\n" % command)
            file.write("%s\n" % out.read().decode('utf-8'))
            file.write("%s\n" % err.read().decode('utf-8'))
            file.write("Exit status: %i\n\n" % out.channel.recv_exit_status())
    return (ins, out, err)
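A minimal usage sketch, assuming the SSH wrapper class defined further down this page; the host and credentials are placeholders, and note that the global Settings may still force logging on:
ssh = SSH('203.0.113.10', 'root', 'hunter2', use_log=False)  # placeholder host and credentials
ins, out, err = ssh.run('uname -a')
print(out.read().decode('utf-8'))        # remote stdout (empty if logging already consumed it)
print(out.channel.recv_exit_status())    # exit code of the remote command
ssh.close_connection()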
def run_as_stream(self, command, logpath):
    """
    Runs a command over SSH on the client as a stream,
    i.e. every line written by the client while the command runs is
    appended to the logfile, until SSH has no more lines to write.
    Returns the combined stdout and stderr of the command as a string.
    """
    (ins, stdout, stderr) = self.client.exec_command(command)
    output = []
    with open(logpath, "a+") as f:
        for line in stdout:
            print(line, end='')
            f.write(line)
            output.append(line)
        for line in stderr:
            print(line, end='')
            f.write(line)
            output.append(line)
    return ''.join(output)
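A short sketch of streaming a long-running command to a logfile, reusing the ssh instance from the previous example; the command and logpath are placeholders:
output = ssh.run_as_stream('apt-get update', '/tmp/apt-update.log')
print('captured %d lines of output' % len(output.splitlines()))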
def __init__(self, bucket, s3_prefix=None, aws_access_key_id=None, aws_secret_access_key=None,
region_name=None):
"""
todo
:type bucket: str
:param bucket:
:type s3_prefix: str
:param s3_prefix:
:type aws_access_key_id: str
:param aws_access_key_id:
:type aws_secret_access_key: str
:param aws_secret_access_key:
:type region_name: str
:param region_name:
"""
self.client = boto3.resource(
's3',
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
region_name=region_name
)
self.bucket = self.client.Bucket(bucket)
self.s3_prefix = s3_prefix
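A hedged usage sketch; the wrapper's class name (S3Store here) is an assumption, while upload_file and download_file are standard boto3 Bucket methods:
store = S3Store('my-backup-bucket', s3_prefix='skynet/', region_name='eu-west-1')  # hypothetical names
key = (store.s3_prefix or '') + 'wallet.dat'
store.bucket.upload_file('/tmp/wallet.dat', key)              # upload a local file under the prefix
store.bucket.download_file(key, '/tmp/wallet-restored.dat')   # and fetch it back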
def __init__(self, client, channel, progress_stderr=None):
self.client = client
self.channel = channel
self.progress_stderr = progress_stderr
    # Note: this expression always evaluates to True, so stderr is monitored
    # even when no progress_stderr callback is given.
    self.should_monitor = bool(progress_stderr) or True
self.monitor_thread = None
self.stderr = b''
# Channel must block
self.channel.setblocking(True)
    # Start monitoring stderr in a background thread
if self.should_monitor:
self.monitor_thread = threading.Thread(
target=self.monitor_stderr)
self.monitor_thread.start()
def run_command(self, host, command, username=None, port=None,
                progress_stderr=None):
    if not isinstance(command, bytes):
        raise TypeError('command must be bytes, got %r' % type(command))
    # Paramiko needs an explicit port; None is not valid
    if port is None:
        port = 22
    client = paramiko.SSHClient()
    # The base MissingHostKeyPolicy does not reject unknown host keys
    policy = paramiko.client.MissingHostKeyPolicy()
    client.set_missing_host_key_policy(policy)
    client.connect(host, username=username, port=port,
                   **self.ssh_kwargs)
    # Open an SSH session on the established transport
    channel = client.get_transport().open_session()
    # Run the command
    channel.exec_command(command)
    return _ParamikoWrapper(
        client, channel, progress_stderr=progress_stderr)
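A hedged usage sketch, assuming runner is an instance of the surrounding class (with ssh_kwargs set) and that monitor_stderr, not shown here, feeds remote stderr to the callback:
def show_progress(text):
    # hypothetical callback: echo remote stderr as it arrives
    print(text, end='')

wrapper = runner.run_command('backup.example.com', b'du -sh /data',
                             username='deploy', progress_stderr=show_progress)
status = wrapper.channel.recv_exit_status()  # wait for the remote command to exit
wrapper.client.close()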
def exec_command(self, client, command, timeout=None, get_pty=False, environment=None):
    # Open a new session channel on the client's underlying transport
    channel = client._transport.open_session(timeout=timeout)
    if get_pty:
        # Request a pseudo-terminal sized to match the local terminal
        width, height = get_terminal_size()
        channel.get_pty(width=width, height=height)
    channel.settimeout(timeout)
    if environment:
        # Ask the server to set these environment variables for the command
        channel.update_environment(environment)
    channel.exec_command(command)
    return channel
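A hedged sketch of calling this helper, assuming runner is an instance of the surrounding class; the host and user are placeholders:
import paramiko

client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect('build.example.com', username='ci')             # placeholder host and user
channel = runner.exec_command(client, 'printenv BUILD_ID', timeout=30,
                              environment={'BUILD_ID': '42'})  # the server must AcceptEnv for this to apply
print(channel.recv(4096).decode())       # stdout of the remote command
print(channel.recv_exit_status())        # exit status, blocks until the command finishes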
def get_client(cls, host, user, debug=False):
key = (host, user)
if key not in cls.clients:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(host, username=user)
cls.clients[key] = client
if debug:
printer.debug('Created SSH connection for {user}@{host}'.format_map(locals()))
else:
printer.debug('Using existing SSH connection for {user}@{host}'.format_map(locals()))
return cls.clients[key]
def cleanup(cls):
for client in cls.clients.values():
client.close()
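A brief usage sketch, assuming these classmethods live on a remote-runner class, called Remote here for illustration:
client = Remote.get_client('app.example.com', 'deploy', debug=True)  # creates and caches the connection
same_client = Remote.get_client('app.example.com', 'deploy')         # returns the cached client
stdin, stdout, stderr = client.exec_command('hostname')
print(stdout.read().decode())
Remote.cleanup()                                                      # close all cached connections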
def __init__(self, sshhost, username, pwd, port=None, use_log=False):
"""
Constructor for the SSH class.
This constructor connects automatically to sshhost over ssh so manual
calling of the connect function is not necessary.
sshhost -- the host to connect to.
username -- the username to use.
pwd -- the password.
port -- the port to connect to.
use_log -- Whether to log the ssh connection in a logfile. (Default is False)
"""
self.sshhost = sshhost
self.username = username
self.pwd = pwd
self.client = SSHClient()
    # Only enable WarningPolicy when not running under an agent, as it
    # would throw unhandled prompts.
    # self.client.set_missing_host_key_policy(WarningPolicy())
    self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    self.client.load_system_host_keys()
    # The VPS buyers return before they finish buying a VPS, which may cause
    # connection timeouts, so retry the connection a number of times.
    # ToDo: make vpsbuyers terminate only once the server is online
    # https://github.com/Skynet2-0/Skynet2.0/issues/60
    tries = 30
    while tries >= 0:
        try:
            self.connect(port=port)
            break
        except socket.error:
            # retry the connection
            tries -= 1
    s = Settings()
    if s.enable_global_ssh_logging():
        self.use_logfile(True)
    else:
        self.use_logfile(use_log)
def close_connection(self):
"""
    Closes the SSH connection between this instance and the client.
"""
self.client.close()
# asg_rolling_upgrade.py (project: asg-rolling-upgrade, author: crunch-accounting)
def connect(self, autoscaling_client=None, ec2=None, ec2_client=None):
""" Opens connections to AWS, specifically the autoscaling client and
EC2 client and resource.
Args:
autoscaling_client: Override the autoscaling client.
ec2: Override the EC2 resource.
ec2_client: Override the EC2 client.
"""
print('Connecting to AWS...')
self._as_client = autoscaling_client or boto3.client('autoscaling')
self._asg_paginator = self._as_client.get_paginator(
'describe_auto_scaling_groups')
self._ec2 = ec2 or boto3.resource('ec2')
self._ec2_client = ec2_client or boto3.client('ec2')
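A hedged sketch of consuming the paginator set up above, assuming upgrader is an instance of the surrounding class:
upgrader.connect()
for page in upgrader._asg_paginator.paginate():
    for group in page['AutoScalingGroups']:
        print(group['AutoScalingGroupName'], group['DesiredCapacity'])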