def bytes_to_site_size(self, byte_num):
    humanized = humanize.naturalsize(byte_num, format='%.2f', binary=True)
    if 'KiB' in humanized:
        humanized = humanize.naturalsize(byte_num, format='%d', binary=True)
    return humanized
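A minimal sketch (assuming the stock behaviour of humanize.naturalsize()) of why the helper above re-renders KiB values with an integer format:

import humanize

# Values at MiB and above keep the two-decimal binary format.
humanize.naturalsize(2 * 1024 * 1024, format='%.2f', binary=True)  # expected: '2.00 MiB'

# Values in the KiB range are re-rendered without decimals.
humanize.naturalsize(1536, format='%d', binary=True)                # expected: '1 KiB'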
def ap_list(host):
    hosts = service.expand_host(host[0])

    def _calc_load(x):
        return x / 65535

    @coroutine
    def _run():
        rows = []
        aps = yield service.create_multiple_ap(hosts)
        details = yield service.ap_list(aps)
        header_out('name, host, #clients, loadavg, mem, uptime')
        for ap in details:
            row = []
            row.append(_val(ap, 'board.hostname'))
            row.append(_val(ap, 'host'))
            row.append('%s' % (_val(ap, 'num_clients')))
            row.append('%.2f / %.2f / %.2f' % (
                _val(ap, 'system.load.0', _calc_load),
                _val(ap, 'system.load.1', _calc_load),
                _val(ap, 'system.load.2', _calc_load)))
            row.append('%s / %s' % (
                _val(ap, 'system.memory.free', naturalsize),
                _val(ap, 'system.memory.total', naturalsize)))
            row.append('%s' % (_val(ap, 'system.uptime', naturaltime)))
            rows.append(', '.join(row))
        out('\n'.join(sorted(rows, cmp_host)))

    IOLoop.instance().run_sync(_run)
def eval_command(self, args):
    tw = TabWriter()
    if args["digests"]:
        tw.padding = [3, 10, 3, 8, 8]
        fm = self.digestsTemplate
        tw.writeln(
            "REPOSITORY\tTAG\tDIGEST\tIMAGE ID\tCREATED\tSIZE")
    elif args["format"] is None:
        tw.padding = [3, 10, 8, 8]
        fm = self.defaultTemplate
        tw.writeln(
            "REPOSITORY\tTAG\tIMAGE ID\tCREATED\tSIZE")
    else:
        fm = args["format"]
        self.settings[self.name] = ""

    del args["digests"]
    del args["format"]
    args["filters"] = dict(args["filters"]) if args["filters"] else None

    nodes = self.client.images(**args)
    for node in nodes:
        try:
            node["Repository"], node["Tag"] = node["RepoTags"][0].split(":")
        except TypeError:
            node["Repository"] = node["RepoDigests"][0].split('@', 2)[0]
            node["Tag"] = "<none>"
        node["Digest"] = (node["RepoDigests"][0].split('@', 2)[1]
                          if node["RepoDigests"] else '<' + str(node["RepoDigests"]) + '>')
        node["Id"] = node["Id"].split(":")[1][:12]
        node["Created"] = arrow.get(node["Created"]).humanize()
        node["Size"] = humanize.naturalsize(node["VirtualSize"])
        tw.writeln(pystache.render(fm, node))

    self.settings[self.name] = str(tw)
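For context, a sketch of the pystache rendering step used above; the row template and the node values here are illustrative assumptions, not the project's actual defaultTemplate:

import pystache

# Hypothetical row template in the spirit of defaultTemplate.
template = "{{Repository}}\t{{Tag}}\t{{Id}}\t{{Created}}\t{{Size}}"
node = {
    "Repository": "ubuntu",      # placeholder values for illustration only
    "Tag": "latest",
    "Id": "2b4cba85892a",
    "Created": "3 days ago",
    "Size": "72.8 MB",
}
print(pystache.render(template, node))  # one tab-separated row per image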
def download(url, filename, progress_data=None, session=None, silent=False):
    """
    Initiate a file download and display the progress
    Args:
        url(str): Download URL
        filename(str): Path to save the file to
        progress_data(dict): Static information to display above the progress bar
        session(Session): An optional download session to use
        silent(bool): Download the file, but don't print any output
    Returns:
    """
    # Set up our requests session and make sure the filepath exists
    session = session or Session()
    os.makedirs(os.path.dirname(filename), 0o755, True)

    # Test the connection
    response = session.head(url, allow_redirects=True)  # type: Response
    response.raise_for_status()

    # Get some information about the file we are downloading
    filesize = naturalsize(response.headers.get('content-length', 0))
    filetype = response.headers.get('content-type', 'Unknown')

    # Format the information output
    info_lines = [
        click.style('Saving to: ', bold=True) + filename,
        click.style('File type: ', bold=True) + filetype,
        click.style('File size: ', bold=True) + filesize
    ]

    if progress_data:
        for key, value in progress_data.items():
            info_lines.append('{key} {value}'.format(key=click.style(key + ':', bold=True), value=value))

    # Print the static information now
    click.echo()
    for line in info_lines:
        click.echo(line)

    # Now let's make the real download request
    response = session.get(url, allow_redirects=True)  # type: Response

    # Process the download
    with open(filename, 'wb') as file:
        length = int(response.headers.get('content-length', 0))
        with click.progressbar(response.iter_content(1024), (length / 1024)) as progress:
            for chunk in progress:
                if chunk:
                    file.write(chunk)
                    file.flush()
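A hypothetical invocation, just to show how the documented arguments fit together; the URL, path, and progress_data values below are placeholders rather than anything from the original project:

from requests import Session

session = Session()
download(
    'https://example.com/archive.zip',        # placeholder download URL
    '/tmp/downloads/archive.zip',             # parent directories are created automatically
    progress_data={'Source': 'example.com'},  # extra lines shown above the progress bar
    session=session,                          # reuse an existing requests session
)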
def export_tar(tree, storage, output, compression=None):
    """ Export a tree in tar format.
    """
    mode = 'w'
    if compression in ('gz', 'bz2', 'xz'):
        mode += ':' + compression
    with tarfile.open(output, mode) as tar:
        for fullname, item in walk_tree(storage, tree):
            payload = None
            info = tarfile.TarInfo()
            info.name = fullname.decode('utf-8', 'ignore')
            if item.type == 'blob':
                payload = storage.get_blob(item.ref).blob
                info.type = tarfile.REGTYPE
                info.size = item['size']
                printer.verbose('Adding to {out}: <b>{fn}</b> ({size})',
                                out=output,
                                fn=fullname.decode('utf-8', errors='ignore'),
                                size=humanize.naturalsize(item['size'], binary=True))
            elif item.type == 'tree':
                info.type = tarfile.DIRTYPE
                printer.verbose('Adding to {out}: <b>{fn}</b> (directory)',
                                out=output,
                                fn=fullname.decode('utf-8', errors='ignore'))
            else:
                if item['filetype'] == 'link':
                    info.type = tarfile.SYMTYPE
                    info.linkname = item['link']
                    printer.verbose('Adding to {out}: <b>{fn}</b> (link to {link})',
                                    out=output,
                                    fn=fullname.decode('utf-8', errors='ignore'),
                                    link=item['link'].decode('utf-8', errors='replace'))
                elif item['filetype'] == 'fifo':
                    info.type = tarfile.FIFOTYPE
                    printer.verbose('Adding to {out}: <b>{fn}</b> (fifo)',
                                    out=output,
                                    fn=fullname.decode('utf-8', errors='ignore'))
                else:
                    continue  # Ignore unknown file types

            # Set optional attributes:
            info.mode = item.get('mode')
            info.uid = item.get('uid')
            info.gid = item.get('gid')
            info.mtime = item.get('mtime')

            # Add the item into the tar file:
            tar.addfile(info, payload)
def export_directory(tree, storage, output):
    """ Export a tree in a directory.
    """
    os.mkdir(output)
    for fullname, item in walk_tree(storage, tree):
        outfullname = os.path.join(output.encode('utf-8'), fullname.lstrip(b'/'))
        if item.type == 'blob':
            blob = storage.get_blob(item.ref).blob
            with open(outfullname, 'wb') as fout:
                shutil.copyfileobj(blob, fout)
            printer.verbose('Exporting to {out}: <b>{fn}</b> ({size})',
                            out=output,
                            fn=fullname.decode('utf-8', errors='replace'),
                            size=humanize.naturalsize(item['size'], binary=True))
        elif item.type == 'tree':
            os.mkdir(outfullname)
            printer.verbose('Exporting to {out}: <b>{fn}</b> (directory)',
                            out=output,
                            fn=fullname.decode('utf-8', errors='replace'))
        else:
            if item['filetype'] == 'link':
                os.symlink(item['link'], outfullname)
                printer.verbose('Exporting to {out}: <b>{fn}</b> (link to {link})',
                                out=output,
                                fn=fullname.decode('utf-8', errors='replace'),
                                link=item['link'].decode('utf-8', errors='replace'))
            elif item['filetype'] == 'fifo':
                os.mkfifo(outfullname)
                printer.verbose('Exporting to {out}: <b>{fn}</b> (fifo)',
                                out=output,
                                fn=fullname.decode('utf-8', errors='replace'))
            else:
                continue  # Ignore unknown file types

        try:
            if 'mode' in item:
                try:
                    os.chmod(outfullname, item['mode'], follow_symlinks=False)
                except SystemError:
                    pass  # Workaround follow_symlinks not implemented in Python 3.5 (bug?)
            if 'uid' in item or 'gid' in item:
                os.chown(outfullname, item.get('uid', -1), item.get('gid', -1), follow_symlinks=False)
        except PermissionError:
            printer.p('<color fg=yellow><b>Warning:</b> unable to set attributes on {fn}</color>',
                      fn=fullname.decode('utf-8', errors='replace'))
def server_info():
    """Return server statistics."""
    started = time()
    proc = Process()
    process_size = proc.memory_info()
    stats = []
    memory = virtual_memory()
    stats.append(
        'Memory: {used} used of {total} ({percent}%)'.format(
            used=naturalsize(memory.used),
            total=naturalsize(memory.total),
            percent=memory.percent
        )
    )
    bt = boot_time()
    uptime = time() - bt
    stats.append(
        'Server Uptime: {delta} since {booted}'.format(
            delta=format_timedelta(timedelta(seconds=uptime)),
            booted=ctime(bt)
        )
    )
    if server.server.started is not None:
        stats.append(
            'Process Uptime: {} since {}'.format(
                format_timedelta(datetime.utcnow() - server.server.started),
                server.server.started.ctime()
            )
        )
    stats.append(
        'OS Version: {} ({})'.format(
            platform(),
            architecture()[0]
        )
    )
    stats.append(
        '{type} Version: {version}'.format(
            type=python_implementation(),
            version=version
        )
    )
    stats.append('Number Of Threads: %d' % proc.num_threads())
    stats.append('Process Memory:')
    stats.append('Real: %s' % naturalsize(process_size.rss))
    stats.append('Virtual: %s' % naturalsize(process_size.vms))
    stats.append('Percent: %.2f' % proc.memory_percent())
    stats.append('Statistics generated in %.2f seconds.' % (time() - started))
    return '\n'.join(stats)
def eval_command(self, args):
    try:
        stats = []
        containers = args["containers"]
        del args["containers"]
        args["decode"] = True
        for container in containers:
            args["container"] = container
            stats.append(self.client.stats(**args))
        """
        for line in self.client.stats(**args):
            for iterElement in list(json_iterparse.json_iterparse(line)):
                # self.output(iterElement, args)
                print iterElement
        """
        clear()
        put_cursor(0, 0)
        print pprint_things(
            "CONTAINER\tCPU %\tMEM USAGE / LIMIT\tMEM %\tNET I/O\tBLOCK I/O\tPIDS"),
        while True:
            y = 1
            for stat in stats:
                put_cursor(0, y)
                y += 1
                tmp = next(stat)
                tmp["Id"] = tmp["id"][:12]
                tmp["Cpu"] = (tmp["cpu_stats"]["cpu_usage"]["total_usage"] /
                              tmp["cpu_stats"]["system_cpu_usage"])
                tmp["MemUsage"] = humanize.naturalsize(
                    tmp["memory_stats"]["usage"])
                tmp["Limit"] = humanize.naturalsize(
                    tmp["memory_stats"]["limit"])
                tmp["Mem"] = (tmp["memory_stats"]["usage"] /
                              tmp["memory_stats"]["limit"])
                tmp["NetInput"] = humanize.naturalsize(
                    tmp["networks"]["eth0"]["rx_bytes"])
                tmp["NetOutput"] = humanize.naturalsize(
                    tmp["networks"]["eth0"]["tx_bytes"])
                tmp["Pids"] = tmp["pids_stats"]["current"]
                print pprint_things(pystache.render(self.defaultTemplate, tmp))
    except KeyboardInterrupt:
        put_cursor(0, y)
        colorama.deinit()
        raise KeyboardInterrupt
    put_cursor(0, y)
    colorama.deinit()
    self.settings[self.name] = "\r"