def doctor(args):
    """Sanity-check the local bgperf environment.

    Verifies the docker daemon version, the presence of the bgperf docker
    images, and prints the ARP GC threshold sysctl value.

    :param args: parsed CLI arguments (unused here, kept for the command API).
    """
    # NOTE(review): original indentation was lost; structure reconstructed.
    ver = dckr.version()['Version']
    # Docker CE appends '-ce' to the version; strip it so it parses as PEP 440.
    if ver.endswith('-ce'):
        curr_version = version.parse(ver.replace('-ce', ''))
    else:
        curr_version = version.parse(ver)
    min_version = version.parse('1.9.0')
    ok = curr_version >= min_version
    print('docker version ... {1} ({0})'.format(ver, 'ok' if ok else 'update to {} at least'.format(min_version)))
    # Trailing comma is a Python-2 leftover; harmless no-op under Python 3.
    print('bgperf image',)
    if img_exists('bgperf/exabgp'):
        print('... ok')
    else:
        print('... not found. run `bgperf prepare`')
    for name in ['gobgp', 'bird', 'quagga']:
        print('{0} image'.format(name),)
        if img_exists('bgperf/{0}'.format(name)):
            print('... ok')
        else:
            print('... not found. if you want to bench {0}, run `bgperf prepare`'.format(name))
    print('/proc/sys/net/ipv4/neigh/default/gc_thresh3 ... {0}'.format(gc_thresh3()))
# --- Provenance note (translated from scraped page metadata) ---
# Python `parse()` usage examples collected from a code-example aggregator.
# Source file: bgperf.py
# Project: Python-Network-Programming-Cookbook-Second-Edition
# Author: PacktPublishing
def _discover_dependencies(self, options):
    """Populate PyInstaller ``options`` with discovered modules and paths.

    Extends ``options['hiddenimports']`` with submodules of the distribution's
    packages and with modules backing the resolved requirements, and extends
    ``options['pathex']`` with directories of required binary files.

    :param options: mutable dict of PyInstaller build options (modified in place).
    """
    # Requirements cannot be assumed to be modules / packages
    # options['hiddenimports'].extend(self.distribution.install_requires)
    #
    # BUGFIX(review): the original compared version.parse(sys.version[0:3]),
    # which truncates "3.10.x" to "3.1" and wrongly fails the >= 3.4 check.
    # sys.version_info compares correctly for all versions.
    if sys.version_info >= (3, 4):
        for package in self.distribution.packages:
            options['hiddenimports'].extend(collect_submodules(package))
    module_files = self._compile_modules()
    required_module_files, required_binary_files = self._compile_requirements(
    )
    for required_file in required_module_files:
        try:
            options['hiddenimports'].append(module_files[required_file])
        except KeyError:
            # Not every requirement file maps to a compiled module; best effort.
            logger.debug(
                'Unable to collect module for {}'.format(required_file))
    for required_file in required_binary_files:
        # FIXME: Add to binaries rather than simply appending to pathex.
        options['pathex'].append(os.path.dirname(required_file))
    # De-duplicate path entries.
    options['pathex'] = list(set(options['pathex']))
def task_dockercompose():
    '''
    assert docker-compose version ({0}) or higher
    '''
    # The docstring is a template; format_docstr substitutes the minimum
    # version into it so `doit list` shows the real requirement.
    from utils.function import format_docstr
    format_docstr(task_dockercompose, MINIMUM_DOCKER_COMPOSE_VERSION)

    def check_docker_compose():
        # Parse `docker-compose --version` output and assert it meets the
        # configured minimum.  shell=True is acceptable here: the command is
        # a fixed string, not built from user input.
        import re
        from subprocess import check_output
        from packaging.version import parse as version_parse
        pattern = '(docker-compose version) ([0-9.]+(-rc[0-9])?)(, build [a-z0-9]+)'
        output = check_output('docker-compose --version', shell=True).decode('utf-8').strip()
        regex = re.compile(pattern)
        match = regex.search(output)
        version = match.groups()[1]
        assert version_parse(version) >= version_parse(MINIMUM_DOCKER_COMPOSE_VERSION)

    # doit task definition: run the version check as the only action.
    return {
        'actions': [
            check_docker_compose,
        ],
    }
def releases(packages):
    """ List released package versions """
    with JSONMapper() as jmap:
        for pkg in packages:
            # Older PyPI JSON payloads expose "project_url"; newer ones use
            # "package_url" — accept either.
            try:
                project_url = pkg["info"]["project_url"]
            except KeyError:
                project_url = pkg["info"]["package_url"]
            # Normalize so version can be appended as a path segment.
            if not project_url.endswith('/'):
                project_url += '/'
            jmap.append(
                pkg["info"]["name"],
                [{
                    "version": version,
                    "is_prerelease": parse(version).is_prerelease,
                    "release_date": first_upload(pkg["releases"][version]),
                    "release_url": project_url + version,
                } for version in sorted(pkg["releases"], key=parse)],
            )
def lookup_package_version(self, args):
    """Yield PyPI package data for each ``name[==version]`` spec in *args*.

    For an explicit ``name==ver`` spec the exact version is fetched; with
    ``self.all_versions`` every (optionally non-prerelease) version is
    yielded; otherwise only the latest version.  Lookup errors are collected
    into ``self.errmsgs`` instead of aborting the iteration.
    """
    for spec in args:
        name, eq, version = spec.partition('=')
        try:
            if eq != '':
                # lstrip handles the second '=' of a '==' spec.
                yield self.get_version(name, version.lstrip('='))
            elif self.all_versions:
                p = self.get_package(name)
                for v in sorted(p["releases"], key=parse):
                    if self.pre or not parse(v).is_prerelease:
                        if v == p["info"]["version"]:
                            # Already have the full payload for the current version.
                            yield p
                        else:
                            ### TODO: Can this call ever fail?
                            yield self.get_version(name, v)
            else:
                yield self.get_latest_version(name)
        except QyPIError as e:
            self.errmsgs.append(str(e))
def sha256_checksum(filename, block_size=65536):
    """Return the SHA-256 hex digest of *filename*.

    Reads the file in ``block_size``-byte chunks so arbitrarily large files
    are hashed with constant memory.

    :param filename: path of the file to hash.
    :param block_size: chunk size in bytes (default 64 KiB).
    :return: lowercase hexadecimal digest string.
    """
    sha256 = hashlib.sha256()
    with open(filename, 'rb') as f:
        # iter(callable, sentinel) yields chunks until read() returns b''.
        for block in iter(lambda: f.read(block_size), b''):
            sha256.update(block)
    return sha256.hexdigest()
# quick and dirty function to parse the Packages file and store the result in the packages dict
def _parse_pypi_json_package_info(self, package_name, current_version, response):
    """Extract latest-version info for *package_name* from a PyPI JSON response.

    :type package_name: str
    :type current_version: version.Version
    :type response: requests.models.Response
    :return: ``(info_dict, 'success')`` on success or
             ``(False, 'error while parsing version')`` on failure.
    """
    data = response.json()
    all_versions = [version.parse(vers) for vers in data['releases'].keys()]
    filtered_versions = [vers for vers in all_versions if not vers.is_prerelease and not vers.is_postrelease]
    if not filtered_versions:  # pragma: nocover
        return False, 'error while parsing version'
    latest_version = max(filtered_versions)
    # even if user did not choose prerelease, if the package from requirements is pre/post release, use it
    if self._prerelease or current_version.is_postrelease or current_version.is_prerelease:
        prerelease_versions = [vers for vers in all_versions if vers.is_prerelease or vers.is_postrelease]
        if prerelease_versions:
            latest_version = max(prerelease_versions)
    try:
        try:
            latest_version_info = data['releases'][str(latest_version)][0]
        except KeyError:  # pragma: nocover
            # non-RFC versions, get the latest from pypi response
            latest_version = version.parse(data['info']['version'])
            latest_version_info = data['releases'][str(latest_version)][0]
    except Exception:  # pragma: nocover
        return False, 'error while parsing version'
    upload_time = latest_version_info['upload_time'].replace('T', ' ')
    return {
        'name': package_name,
        'current_version': current_version,
        'latest_version': latest_version,
        'upgrade_available': current_version < latest_version,
        'upload_time': upload_time
    }, 'success'
def _parse_simple_html_package_info(self, package_name, current_version, response):
    """Extract latest-version info from a PEP 503 "simple" HTML index page.

    :type package_name: str
    :type current_version: version.Version
    :type response: requests.models.Response
    :return: ``(info_dict, 'success')`` on success or
             ``(False, 'error while parsing version')`` on failure.
    """
    # Match anchor texts like "<name>-<version>-py..." or "<name>-<version>.tar...".
    pattern = r'<a.*>.*{name}-([A-z0-9\.-]*)(?:-py|\.tar).*<\/a>'.format(name=re.escape(package_name))
    versions_match = re.findall(pattern, response.content.decode('utf-8'), flags=re.IGNORECASE)
    all_versions = [version.parse(vers) for vers in versions_match]
    filtered_versions = [vers for vers in all_versions if not vers.is_prerelease and not vers.is_postrelease]
    if not filtered_versions:  # pragma: nocover
        return False, 'error while parsing version'
    latest_version = max(filtered_versions)
    # even if user did not choose prerelease, if the package from requirements is pre/post release, use it
    if self._prerelease or current_version.is_postrelease or current_version.is_prerelease:
        prerelease_versions = [vers for vers in all_versions if vers.is_prerelease or vers.is_postrelease]
        if prerelease_versions:
            latest_version = max(prerelease_versions)
    # The simple index carries no upload timestamps, hence '-'.
    return {
        'name': package_name,
        'current_version': current_version,
        'latest_version': latest_version,
        'upgrade_available': current_version < latest_version,
        'upload_time': '-'
    }, 'success'
def version(self):
    """ Parsed and normalized package manager's own version.
    Returns an instance of ``packaging.Version`` or None.
    """
    # Implicitly returns None when no version string was captured.
    if self.version_string:
        return parse_version(self.version_string)
def check(version):
    """Validate that *version* is a strict version string without a 'v' prefix.

    NOTE(review): these asserts are stripped under ``python -O``; raising
    ValueError would be safer if this guards untrusted input — left as-is to
    preserve behavior.
    """
    v = parse(version)
    assert isinstance(v, Version), f'Invalid version: {version}'
    assert not version.startswith('v')
def check_for_updates():
    """Query GitHub for the latest hakkuframework release tag.

    :return: True if a newer release is available, False if up to date,
             None on HTTP/parse errors (only a message is printed).
    """
    try:
        print(colors.green+"checking for updates..."+colors.end)
        r = requests.get("https://api.github.com/repos/4shadoww/hakkuframework/releases/latest")
        if(r.ok):
            items = json.loads(r.text or r.content)
            rver = items['tag_name']
            # A remote beta counts as an update when we run an alpha build.
            if "beta" in rver and "alpha" in info.version:
                print(colors.green+"update found"+colors.end)
                return True
            elif "beta" not in rver and "alpha" not in rver:
                # Remote is a stable release.
                if "beta" in info.version or "alpha" in info.version:
                    print(colors.green+"update found"+colors.end)
                    return True
                elif version.parse(rver) > version.parse(info.version):
                    print(colors.green+"update found"+colors.end)
                    return True
                else:
                    print(colors.yellow+"updates not found"+colors.end)
                    return False
        else:
            print("error")
    except Exception as error:
        # Broad catch is deliberate: an update check must never crash the app.
        print(colors.red+"error: "+str(error)+colors.end)
def parse(self, version_str: str, args: configargparse.Namespace) -> VersionContainer[PEP440Adapter]:
    """Parse *version_str* as PEP 440 and wrap it in a VersionContainer.

    :raises ErrorMessage: if the string is not a valid PEP 440 version
        (chained from the underlying InvalidVersion).
    """
    try:
        version = _adapt(parse(version_str))
    except InvalidVersion as e:
        raise ErrorMessage("%s is not a valid PEP-440 version string: %s" %
                           (highlight(version_str), str(e))) from e
    return VersionContainer(version, self.versionparser_name)
# mad hackz ahead
def deserialize(self, serialized: str) -> VersionContainer:
    """Rebuild a VersionContainer from its serialized version string."""
    return VersionContainer(parse(serialized), self.versionparser_name)
def tag2version(tag):
    """Parse a git tag string into a ``packaging`` version object."""
    # Local import keeps packaging an optional dependency of this module.
    from packaging import version
    return version.parse(tag)
def _check_supported_version(current_version, supported_versions):
    """
    The dna file contains supported Kalliope version for the module to install.
    Check if supported versions are match the current installed version. If not, ask the user to confirm the
    installation anyway
    :param current_version: current version installed of Kalliope. E.g 0.4.0
    :param supported_versions: list of supported version
    :return: True if the version is supported or user has confirmed the installation
    """
    logger.debug("[ResourcesManager] Current installed version of Kalliope: %s" % str(current_version))
    logger.debug("[ResourcesManager] Module supported version: %s" % str(supported_versions))
    supported_version_found = False
    # Extract major version (raw string avoids the invalid-escape warning).
    match_current_version = re.search(r'^[\d]*[.][\d]*', current_version)
    if match_current_version:
        current_version = match_current_version.group(0)
        for supported_version in supported_versions:
            if version.parse(str(current_version)) == version.parse(str(supported_version)):
                # we found the exact version
                supported_version_found = True
                break
    if not supported_version_found:
        # we ask the user if we want to install the module even if the version doesn't match
        Utils.print_info("Current installed version of Kalliope: %s" % current_version)
        Utils.print_info("Module supported versions: %s" % str(supported_versions))
        Utils.print_warning("The neuron seems to be not supported by your current version of Kalliope")
        supported_version_found = Utils.query_yes_no("install it anyway?")
        logger.debug("[ResourcesManager] install it anyway user answer: %s" % supported_version_found)
    logger.debug("[ResourcesManager] check_supported_version: %s" % str(supported_version_found))
    return supported_version_found
def execute(self, branch: str):
    """Compare the local gitcd version against PyPI and offer an upgrade.

    :param branch: current git branch (unused by this command, part of the
        command interface).
    """
    checkUpgrade = True
    local = self.getLocalVersion()
    try:
        pypi = self.getPypiVersion()
    except GitcdPyPiApiException as e:
        # PyPI unreachable: report versions, then show the error and bail.
        pypi = 'unknown'
        message = str(e)
        checkUpgrade = False
    self.interface.info('Local %s' % local)
    self.interface.info('PyPi %s' % pypi)
    if checkUpgrade is False:
        self.interface.error(message)
        return
    if version.parse(local) < version.parse(pypi):
        upgrade = self.interface.askFor(
            "Do you want me to upgrade gitcd for you?",
            ["yes", "no"],
            "yes"
        )
        if upgrade == 'yes':
            try:
                pip.main(['install', '--user', '--upgrade', 'gitcd'])
                return
            except SystemExit as e:
                # pip.main calls sys.exit on failure; fall through to the
                # manual-upgrade hint below.
                self.interface.error('An error occured during the update!')
                pass
        self.interface.info(
            'Please upgrade by running pip3 install gitcd --upgrade'
        )
    else:
        self.interface.ok(
            'You seem to be on the most recent version, congrats'
        )
def _compile_requirements(self):
    """Resolve the distribution's requirements to concrete files on disk.

    Computes the transitive closure of install_requires via ``pipdeptree``,
    then walks each installed package's file list.

    :return: ``(module_files, binary_files)`` — two sets of absolute paths.
    """
    packages = set()
    for requirement in self.distribution.install_requires:
        requirement = Requirement.parse(requirement)
        packages.add(requirement.key)
    # Create the dependency map
    entries = json.loads(
        decode(subprocess.check_output(['pipdeptree', '--json'])))
    entry_map = {entry['package']['key']:
                 set([dependency['key'] for dependency in entry['dependencies']])
                 for entry in entries}
    # Transitive closure: keep folding in dependencies until no package adds
    # anything new (the for/else breaks out once a full pass adds nothing).
    while True:
        for package in packages:
            if package in entry_map and not entry_map[package] < packages:
                packages.update(entry_map[package])
                break
        else:
            break
    module_files = set()
    binary_files = set()
    for package in packages:
        # NOTE(review): removed unused local `in_header` from the original.
        root = None
        for info in search_packages_info(package):
            files = info['files']
            root = info['location']
            for file in files:
                full_path = os.path.abspath(
                    os.path.join(root, file))
                if full_path.endswith('.py') or full_path.endswith('.pyc'):
                    module_files.add(full_path)
                if is_binary(full_path):
                    binary_files.add(full_path)
    return module_files, binary_files
def version_check(version):
    """Assert the API version is at least the CLI version.

    :param version: object with ``api`` and ``cli`` version-string attributes.
    :raises VersionCheckFailedError: when the API version is older than the CLI.
    """
    if version_parse(version.api) >= version_parse(version.cli):
        logging.debug('version_check: PASSED')
    else:
        raise VersionCheckFailedError(version.cli, version.api)
def get_version(self):
    """Derive the package version from the latest matching git tag.

    Runs ``git describe --match v*.*.*`` and parses the first group matched
    by ``self.version_pat`` out of its output.
    """
    describe_byte_string = subprocess.check_output(
        ['git', 'describe', '--match', 'v*.*.*'])
    version_string = re.findall(self.version_pat, describe_byte_string)[0]
    return version.parse(version_string)
def enrich_j2_environment(self, env):
    """Install OpenBGPD-specific filters and data into the Jinja2 env.

    :param env: jinja2 Environment to receive the custom filters.
    """
    def convert_ext_comm(s):
        # "rt:65000:1" -> "rt 65000:1" (OpenBGPD extended-community syntax).
        parts = s.split(":")
        return "{} {}:{}".format(
            parts[0], parts[1], parts[2]
        )

    def at_least_one_client_uses_tag_reject_policy():
        for client in self.cfg_clients.cfg["clients"]:
            policy = client["cfg"]["filtering"]["reject_policy"]["policy"]
            if policy == "tag":
                return True
        return False

    def community_is_set(comm):
        if not comm:
            return False
        # OpenBGPD <= 6.0 does not implement large BGP communities,
        # so only standard and extended ones are considered.
        if version.parse(self.target_version or "6.0") < version.parse("6.1"):
            if not comm["std"] and not comm["ext"]:
                return False
        else:
            if not comm["std"] and not comm["ext"] and not comm["lrg"]:
                return False
        return True

    env.filters["convert_ext_comm"] = convert_ext_comm
    env.filters["community_is_set"] = community_is_set
    self.data["at_least_one_client_uses_tag_reject_policy"] = \
        at_least_one_client_uses_tag_reject_policy()
def get_latest_version(self, package):
    """Return the JSON payload for the latest suitable version of *package*.

    Honors ``self.pre`` (include prereleases) and ``self.newest`` (pick the
    most recently uploaded instead of the highest version number).

    :raises QyPIError: when no version satisfies the filters.
    """
    pkg = self.get_package(package)
    releases = {
        (parse(rel), rel): first_upload(files)
        # The unparsed version string needs to be kept around because the
        # alternative approach (stringifying the Version object once
        # comparisons are done) can result in a different string (e.g.,
        # "2001.01.01" becomes "2001.1.1"), leading to a 404.
        for rel, files in pkg["releases"].items()
    }
    candidates = releases.keys()
    if not self.pre:
        candidates = filter(lambda v: not v[0].is_prerelease, candidates)
    if self.newest:
        # Newest by upload time, skipping releases with no uploads (falsy).
        latest = max(
            filter(releases.__getitem__, candidates),
            key=releases.__getitem__,
            default=None,
        )
    else:
        # Highest version number (tuples compare by parsed version first).
        latest = max(candidates, default=None)
    if latest is None:
        raise QyPIError(package + ': no suitable versions available')
    latest = latest[1]
    if pkg["info"]["version"] == latest:
        return pkg
    else:
        return self.get_version(package, latest)
def squish_versions(releases):
    """
    Given a list of `dict`s containing (at least) ``"name"`` and ``"version"``
    fields, return for each name the `dict` with the highest version.
    It is assumed that `dict`s with the same name are always adjacent.
    """
    # groupby only merges adjacent equal keys — hence the adjacency assumption.
    for _, versions in groupby(releases, itemgetter("name")):
        yield max(versions, key=lambda v: parse(v["version"]))
def detect_available_upgrades(self, options):
    """Check each pinned requirement against its index for newer versions.

    :param options: CLI options dict; honors ``--prerelease`` and ``-p``
        (explicit package selection, or ``['all']``).
    :return: mapping of package name -> status dict (also stored on
        ``self.packages_status_map``).
    """
    self._prerelease = options.get('--prerelease', False)
    explicit_packages_lower = None
    if options['-p'] and options['-p'] != ['all']:
        explicit_packages_lower = [pack_name.lower() for pack_name in options['-p']]
    for i, package in enumerate(self.packages):
        try:
            package_name, pinned_version = self._expand_package(package)
            if not package_name or not pinned_version:  # pragma: nocover
                # todo: treat <= or >= instead of ==
                continue
            if explicit_packages_lower and package_name.lower() not in explicit_packages_lower:
                # skip if explicit and not chosen
                continue
            current_version = version.parse(pinned_version)
            if pinned_version and isinstance(current_version, version.Version):  # version parsing is correct
                package_status, reason = self._fetch_index_package_info(package_name, current_version)
                if not package_status:  # pragma: nocover
                    print(package, reason)
                    continue
                print('{}/{}: {} ... '.format(i + 1, len(self.packages), package_name), end='')
                sys.stdout.flush()
                # compare versions
                if current_version < package_status['latest_version']:
                    print('upgrade available: {} ==> {} (uploaded on {})'.format(current_version,
                                                                                 package_status['latest_version'],
                                                                                 package_status['upload_time']))
                else:
                    print('up to date: {}'.format(current_version))
                sys.stdout.flush()
                self.packages_status_map[package_name] = package_status
        except Exception as e:  # noqa # pragma: nocover
            # Best-effort: a single broken requirement must not stop the scan.
            print('Error while parsing package {} (skipping). \nException: '.format(package), e)
    return self.packages_status_map
def outdated(self):
    """ Fetch outdated packages from ``brew outdated`` output.
    Raw CLI output samples:
    .. code-block:: shell-session
        $ brew outdated --json=v1
        [
          {
            "name": "cassandra",
            "installed_versions": [
              "3.5"
            ],
            "current_version": "3.7"
          },
          {
            "name": "vim",
            "installed_versions": [
              "7.4.1967"
            ],
            "current_version": "7.4.1993"
          },
          {
            "name": "youtube-dl",
            "installed_versions": [
              "2016.07.06"
            ],
            "current_version": "2016.07.09.1"
          }
        ]
    """
    outdated = {}
    # List available updates.
    output = self.run(
        [self.cli_path] + self.cli_args + ['outdated', '--json=v1'])
    if output:
        for pkg_info in json.loads(output):
            # Parse versions to avoid lexicographic sorting gotchas.
            version = None
            versions = set(pkg_info['installed_versions'])
            if versions:
                # Keep the original string alongside the parsed key.
                _, version = max([(parse_version(v), v) for v in versions])
            package_id = pkg_info['name']
            outdated[package_id] = {
                'id': package_id,
                'name': package_id,
                'installed_version': version,
                'latest_version': pkg_info['current_version']}
    return outdated
def installed(self):
    """ Fetch installed packages from ``gem list`` output.
    Raw CLI output samples:
    .. code-block:: shell-session
        $ gem list
        *** LOCAL GEMS ***
        bigdecimal (1.2.0)
        CFPropertyList (2.2.8)
        io-console (0.4.2)
        json (1.7.7)
        libxml-ruby (2.6.0)
        molinillo (0.5.4, 0.4.5, 0.2.3)
        nokogiri (1.5.6)
        psych (2.0.0)
        rake (0.9.6)
        rdoc (4.0.0)
        sqlite3 (1.3.7)
        test-unit (2.0.0.0)
    """
    installed = {}
    output = self.run([self.cli_path] + self.cli_args + ['list'])
    if output:
        # Each line looks like "name (v1, v2, ...)".
        regexp = re.compile(r'(\S+) \((.+)\)')
        for package in output.split('\n'):
            match = regexp.match(package)
            if match:
                package_id, versions = match.groups()
                # Guess latest installed version.
                versions = set([v.strip() for v in versions.split(',')])
                # Parse versions to avoid lexicographic sorting gotchas.
                version = None
                if versions:
                    _, version = max(
                        [(parse_version(v), v) for v in versions])
                installed[package_id] = {
                    'id': package_id,
                    'name': package_id,
                    'installed_version': version}
    return installed
def convert_result_and_expected_and_check(result, expected):
    """Convert a single resolver-run result to match what was expected

    :param result: resolver outcome — either a resolved set or a
        ``CannotSatisfy`` error object.
    :param expected: dict from the test YAML with either a ``"conflicts"``
        or a ``"set"`` key.
    :raises YAMLException: if *expected* is not a dict.
    :raises AssertionError: on any mismatch between result and expectation.
    """
    if not isinstance(expected, dict):
        raise YAMLException(
            "The expected result of this test is not a dictionary."
        )
    if isinstance(result, CannotSatisfy):
        result = _convert_error(result)
    else:
        result = _convert_resolved_set(result)
    errors = []
    if "conflicts" in expected:
        if "conflicts" not in result:
            message = "Expected to get conflicts, got resolved set"
            raise AssertionError(message)
        # TODO: Beef this up; maybe try to show what's messed up.
    else:
        assert "set" in expected, "set not in expected"
        if "chosen_set" not in result:
            message = "Expected to get resolved set, got conflicts"
            raise AssertionError(message)
        # Make sure we got the right versions
        for item in expected["set"]:
            name, version = item.split(" ", 1)
            if name not in result["chosen_set"]:
                errors.append(name + " is missing.")
                continue
            if parse_version(version) != result["chosen_set"][name].version:
                errors.append(
                    "Expected {} to be version {}, got {}".format(
                        name, version, result["chosen_set"][name].version
                    )
                )
            # Consume the entry so leftovers can be reported below.
            del result["chosen_set"][name]
        # Make sure we got the right packages
        if result["chosen_set"]:
            for key in result["chosen_set"]:
                errors.append(
                    "Got unexpected selection: {} {}".format(
                        key, result["chosen_set"][key].version
                    )
                )
        # TODO: Check the graph of dependencies
    if errors:
        raise AssertionError("Incorrect resolution:\n- " + "\n- ".join(errors))
def check_new_release(self, print_output=False):
    """Check whether a newer ARouteServer release is available.

    :param print_output: when True, report via print(); otherwise via
        logging.warning() and only when a new release exists.
    """
    checker = LastVersion(
        cache_dir=program_config.get_dir("cache_dir"),
        cache_expiry={"general": 604800}
    )
    try:
        checker.load_data()
    except LastVersionCheckingError as e:
        if print_output:
            print(str(e))
        else:
            logging.warning(str(e))
        return
    last_version = checker.last_version
    if not last_version:
        msg = "Can't understand the latest version: empty response"
        if print_output:
            print(msg)
        else:
            logging.warning(msg)
        return
    try:
        version.parse(last_version)
    except Exception as e:
        msg = "Can't understand the latest version: {}".format(str(e))
        if print_output:
            print(msg)
        else:
            logging.warning(msg)
        # BUGFIX(review): the original fell through here and called
        # version.parse(last_version) again below, re-raising unguarded.
        return
    new_rel = version.parse(last_version) > version.parse(__version__)
    url = "https://github.com/pierky/arouteserver/releases"
    if print_output:
        if new_rel:
            print("A new release of ARouteServer is available")
            print("Details at " + url)
        else:
            print("No new releases are available")
        print("")
        print("Current version: {}".format(__version__))
        print("Latest version : {}".format(last_version))
    else:
        if new_rel:
            logging.warning("A new release is available: {} "
                            "(running version: {}) - "
                            "Details at {}".format(
                                last_version, __version__, url
                            ))