def list(self):
resp = None
self.parser.add_argument('--type', help="Type of unit valid values are\
containers, nodes", required=True)
args = self.parser.parse_args()
unit_type = vars(args)['type']
data = {"sub_type": unit_type}
galaxia_api_endpoint = os.getenv("galaxia_api_endpoint")
target_url = client.concatenate_url(galaxia_api_endpoint, self.metrics_uri)
try:
resp = client.http_request('GET', target_url, self.headers, data)
headers = ["NAME", "DESCRIPTION"]
print "List of supported metrics for "+unit_type
format_print.format_dict(resp.json(), headers)
except Exception as ex:
pass
def sample(self):
resp = None
self.parser.add_argument('--type', help="Type of unit valid values are\
containers, nodes", required=True)
    self.parser.add_argument('--search-string', help='Search string', required=False)
    self.parser.add_argument('--search-type', help='Search type', required=False)
self.parser.add_argument('--meter-name', help='Name of the meter', required=True)
args = self.parser.parse_args()
data = {"type": vars(args)['type'], "search_string": vars(args)['search_string'],
"search_type": vars(args)['search_type'] , "meter_name": vars(args)['meter_name']}
galaxia_api_endpoint = os.getenv("galaxia_api_endpoint")
target_url = client.concatenate_url(galaxia_api_endpoint, self.sample_uri)
try:
resp = client.http_request('GET', target_url, self.headers, data)
headers = ["NAME", "VALUE"]
print "Current "+ vars(args)['meter_name']
#print "Current "+unit_type
format_print.format_dict(resp.json(), headers)
except Exception as ex:
pass
def create(self):
self.parser.add_argument('--name', help='Name of the dashboard', required=True)
    self.parser.add_argument('--metrics-list', nargs='+',
                             help='List of metrics to be displayed on the dashboard')
    self.parser.add_argument('--names-list', help='Names list of units '
                             'to plot in dashboard')
    self.parser.add_argument('--search-string', help='Search string')
    self.parser.add_argument('--search-type', help='Search type')
self.parser.add_argument('--unit-type', help='Type of unit, valid value is docker')
self.parser.add_argument('--exclude', help='Search excluding search string', required=False)
args = self.parser.parse_args()
if not (args.names_list or (args.search_string and args.search_type)):
self.parser.error('add --names-list or (--search-string and --search-type)')
json_data = client.create_request_data(**vars(args))
    print(json_data)
galaxia_api_endpoint = os.getenv("galaxia_api_endpoint")
target_url = client.concatenate_url(galaxia_api_endpoint, self.url)
try:
resp = client.http_request('PUT', target_url, self.headers, json_data)
        print(resp.text)
except Exception as ex:
pass
def oldest_peer(peers):
"""Determines who the oldest peer is by comparing unit numbers."""
local_unit_no = int(os.getenv('JUJU_UNIT_NAME').split('/')[1])
for peer in peers:
remote_unit_no = int(peer.split('/')[1])
if remote_unit_no < local_unit_no:
return False
return True
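# A minimal usage sketch (not from the source): Juju unit names have the form
# "<application>/<unit number>", so oldest_peer() simply compares the numeric
# suffixes. The unit names below are hypothetical.
import os

os.environ['JUJU_UNIT_NAME'] = 'mysql/2'
print(oldest_peer(['mysql/3', 'mysql/5']))   # True  - no peer has a lower number
print(oldest_peer(['mysql/0', 'mysql/3']))   # False - mysql/0 is older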
def get_chrome_path():
    # Locate the default Chrome profile directory for the current platform.
    PathName = None
    if win_client():
        PathName = os.getenv('localappdata') + '\\Google\\Chrome\\User Data\\Default\\'
    elif osx_client():
        PathName = os.getenv('HOME') + "/Library/Application Support/Google/Chrome/Default/"
    elif lnx_client():
        PathName = os.getenv('HOME') + '/.config/google-chrome/Default/'
    if PathName is None or not os.path.isdir(PathName):
        return "[!] Chrome doesn't exist", False
    return PathName, True
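# Hedged usage sketch for get_chrome_path(): it returns a (path_or_message,
# success) pair, so check the flag before using the path. win_client(),
# osx_client() and lnx_client() are assumed to come from the original module.
chrome_path, found = get_chrome_path()
if found:
    print("Chrome profile directory: " + chrome_path)
else:
    print(chrome_path)   # the error message returned above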
def __init__(self):
self.__path_home = os.getenv("HOME")
self.__path_config = self.__path_home + "/.config/mpis"
self.__path_file = "/usr/share/mpis"
self.__path_tr = os.path.join(self.__path_config, "locale")
self.__path_db = os.path.join(self.__path_config, "db")
def init_from_url(self, snapshot=-1, thingpedia_url=None):
if thingpedia_url is None:
thingpedia_url = os.getenv('THINGPEDIA_URL', 'https://thingpedia.stanford.edu/thingpedia')
ssl_context = ssl.create_default_context()
with urllib.request.urlopen(thingpedia_url + '/api/snapshot/' + str(snapshot) + '?meta=1', context=ssl_context) as res:
self._process_devices(json.load(res)['data'])
with urllib.request.urlopen(thingpedia_url + '/api/entities?snapshot=' + str(snapshot), context=ssl_context) as res:
self._process_entities(json.load(res)['data'])
def get_thingpedia(input_words, workdir, snapshot):
thingpedia_url = os.getenv('THINGPEDIA_URL', 'https://thingpedia.stanford.edu/thingpedia')
output = dict()
with urllib.request.urlopen(thingpedia_url + '/api/snapshot/' + str(snapshot) + '?meta=1', context=ssl_context) as res:
output['devices'] = json.load(res)['data']
for device in output['devices']:
if device['kind_type'] == 'global':
continue
if device['kind_canonical']:
add_words(input_words, device['kind_canonical'])
else:
print('WARNING: missing canonical for tt-device:%s' % (device['kind'],))
for function_type in ('triggers', 'queries', 'actions'):
for function_name, function in device[function_type].items():
if not function['canonical']:
print('WARNING: missing canonical for tt:%s.%s' % (device['kind'], function_name))
else:
add_words(input_words, function['canonical'])
for argname, argcanonical in zip(function['args'], function['argcanonicals']):
if argcanonical:
add_words(input_words, argcanonical)
else:
add_words(input_words, clean(argname))
for argtype in function['schema']:
if not argtype.startswith('Enum('):
continue
enum_entries = argtype[len('Enum('):-1].split(',')
for enum_value in enum_entries:
add_words(input_words, clean(enum_value))
with urllib.request.urlopen(thingpedia_url + '/api/entities?snapshot=' + str(snapshot), context=ssl_context) as res:
output['entities'] = json.load(res)['data']
for entity in output['entities']:
if entity['is_well_known'] == 1:
continue
add_words(input_words, tokenize(entity['name']))
with open(os.path.join(workdir, 'thingpedia.json'), 'w') as fp:
json.dump(output, fp, indent=2)
def main():
np.random.seed(1234)
workdir = sys.argv[1]
if len(sys.argv) > 2:
snapshot = int(sys.argv[2])
else:
snapshot = -1
if len(sys.argv) > 3:
embed_size = int(sys.argv[3])
else:
embed_size = 300
dataset = os.getenv('DATASET', workdir)
glove = os.getenv('GLOVE', os.path.join(workdir, 'glove.42B.300d.txt'))
download_glove(glove)
input_words = set()
# add the canonical words for the builtin functions
add_words(input_words, 'now nothing notify return the event')
create_dictionary(input_words, dataset)
get_thingpedia(input_words, workdir, snapshot)
save_dictionary(input_words, workdir)
trim_embeddings(input_words, workdir, embed_size, glove)
def read_config():
config = ConfigParser()
config.read([path.join(BASE_DIR, 'settings.ini'), os.getenv('CONF_FILE', '')])
return config
def read_environment(self):
''' Reads the settings from environment variables '''
# Setup credentials
if os.getenv("DO_API_TOKEN"):
self.api_token = os.getenv("DO_API_TOKEN")
if os.getenv("DO_API_KEY"):
self.api_token = os.getenv("DO_API_KEY")
def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
r"""Return full path to the user-specific config dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"roaming" (boolean, default False) can be set True to use the Windows
roaming appdata directory. That means that for users on a Windows
network setup for roaming profiles, this user data will be
sync'd on login. See
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.
Typical user data directories are:
macOS: same as user_data_dir
Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined
Win *: same as user_data_dir
For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
"""
if system in ["win32", "darwin"]:
path = user_data_dir(appname, appauthor, None, roaming)
else:
path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
def user_config_dir(appname, roaming=True):
"""Return full path to the user-specific config dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"roaming" (boolean, default True) can be set False to not use the
Windows roaming appdata directory. That means that for users on a
Windows network setup for roaming profiles, this user data will be
sync'd on login. See
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.
Typical user data directories are:
macOS: same as user_data_dir
Unix: ~/.config/<AppName>
Win *: same as user_data_dir
For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
That means, by default "~/.config/<AppName>".
"""
if WINDOWS:
path = user_data_dir(appname, roaming=roaming)
elif sys.platform == "darwin":
path = user_data_dir(appname)
else:
path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
path = os.path.join(path, appname)
return path
# for the discussion regarding site_config_dirs locations
# see <https://github.com/pypa/pip/issues/1733>
def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
r"""Return full path to the user-specific config dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"roaming" (boolean, default False) can be set True to use the Windows
roaming appdata directory. That means that for users on a Windows
network setup for roaming profiles, this user data will be
sync'd on login. See
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.
Typical user data directories are:
Mac OS X: same as user_data_dir
Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined
Win *: same as user_data_dir
For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
"""
if system in ["win32", "darwin"]:
path = user_data_dir(appname, appauthor, None, roaming)
else:
path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
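# A minimal usage sketch (not from the source) for user_config_dir() above:
# on Unix it honours $XDG_CONFIG_HOME and otherwise falls back to ~/.config;
# "myapp" and the paths in the comments are placeholders.
import os

os.environ.pop('XDG_CONFIG_HOME', None)       # use the ~/.config fallback
print(user_config_dir('myapp'))               # e.g. /home/user/.config/myapp

os.environ['XDG_CONFIG_HOME'] = '/tmp/xdg'    # explicit override
print(user_config_dir('myapp'))               # /tmp/xdg/myapp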
def check_for_tokens():
'''
    Checks for tokens present in the system environment. To set them, export them
in your ~/.bashrc or ~/.zshrc
'''
log.debug('Checking for tokens')
kite_api_key = getenv('KITE_API_KEY')
kite_request_token = getenv('KITE_REQUEST_TOKEN')
kite_secret = getenv('KITE_SECRET')
# Get your request token from the first time
# kite.trade/connect/login?api_key=<>
log.debug("Tokens fetched: {} {} ".format(kite_api_key,
kite_secret,))
if kite_api_key is None or kite_secret is None:
print('''
You need to add your Kite API token,
along with Secret Key. \n
export KITE_API_KEY='your-kite-api-key'
export KITE_SECRET='your-kite-secret-key'
\n
You can fetch it from here : https://developers.kite.trade/apps
''')
return False
log.debug("Kite Request Token: {}".format(kite_request_token))
if kite_request_token is None:
print('''
Set your request token.
You can do this by setting environment variables: \n
export KITE_REQUEST_TOKEN='your-kite-request-token' \n
Generate request token from
https://kite.trade/connect/login?api_key=<>
''')
return False
return True
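# Hedged usage sketch for check_for_tokens(): the exported values below are
# placeholders; in practice they come from your shell profile as the docstring
# describes.
import os

os.environ['KITE_API_KEY'] = 'your-kite-api-key'
os.environ['KITE_SECRET'] = 'your-kite-secret-key'
os.environ['KITE_REQUEST_TOKEN'] = 'your-kite-request-token'
print(check_for_tokens())   # True once all three variables are set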
def __load_dictionary(self, config):
var = config.get_value('engine/replace-with-kanji-python', 'dictionary')
if var is None or var.get_type_string() != 's':
path = os.path.join(os.getenv('IBUS_REPLACE_WITH_KANJI_LOCATION'), 'restrained.dic')
if var:
config.unset('engine/replace-with-kanji-python', 'dictionary')
else:
path = var.get_string()
return Dictionary(path)
def __init__(self, path):
logger.info("Dictionary(%s)", path)
self.__dict_base = {}
self.__dict = {}
self.__yomi = ''
self.__no = 0
self.__cand = []
self.__numeric = ''
self.__dirty = False
self.__orders_path = ''
# Load Katakana dictionary first so that Katakana words come after Kanji words.
katakana_path = os.path.join(os.getenv('IBUS_REPLACE_WITH_KANJI_LOCATION'), 'katakana.dic')
self.__load_dict(self.__dict_base, katakana_path)
# Load system dictionary
self.__load_dict(self.__dict_base, path)
# Load private dictionary
self.__dict = self.__dict_base.copy()
my_path = os.path.expanduser('~/.local/share/ibus-replace-with-kanji/my.dic')
self.__load_dict(self.__dict, my_path, 'a+')
base = os.path.basename(path)
if base:
self.__orders_path = os.path.expanduser('~/.local/share/ibus-replace-with-kanji')
self.__orders_path = os.path.join(self.__orders_path, base)
self.__load_dict(self.__dict, self.__orders_path, 'a+', version_checked=False)
def __init__(self):
'''Constructor.'''
logging.basicConfig()
random.seed()
self._slack_bot_token = os.getenv('SLACK_BOT_TOKEN')
self._slack = slackclient.SlackClient(self._slack_bot_token)
self._slack_bot_id = self._get_bot_id()
self._at_bot = '<@' + str(self._slack_bot_id) + '>'
self._scraper = scraper.Scraper()
self._polls = {}
self.is_running = True
self._reaction_interval = 1
self._keywords = [
'belly',
'bite',
'eat',
'food',
'lunch',
'meal',
'menu',
'offer',
'stomach'
]
self._thread = threading.Thread(None, self._loop_messages)
self._thread.start()
def gather_mpi_arguments(hostfile, params):
from mpi4py import MPI
vendor = MPI.get_vendor()
print_and_log(['MPI detected: %s' % str(vendor)], 'debug', logger)
if vendor[0] == 'Open MPI':
mpi_args = ['mpirun']
if os.getenv('LD_LIBRARY_PATH'):
mpi_args += ['-x', 'LD_LIBRARY_PATH']
if os.getenv('PATH'):
mpi_args += ['-x', 'PATH']
if os.getenv('PYTHONPATH'):
mpi_args += ['-x', 'PYTHONPATH']
if os.path.exists(hostfile):
mpi_args += ['-hostfile', hostfile]
elif vendor[0] == 'Microsoft MPI':
mpi_args = ['mpiexec']
if os.path.exists(hostfile):
mpi_args += ['-machinefile', hostfile]
elif vendor[0] == 'MPICH2':
mpi_args = ['mpiexec']
if os.path.exists(hostfile):
mpi_args += ['-f', hostfile]
elif vendor[0] == 'MPICH':
mpi_args = ['mpiexec']
if os.path.exists(hostfile):
mpi_args += ['-f', hostfile]
else:
print_and_log([
        '%s may not yet be properly implemented: contact the developers' %
vendor[0]], 'error', logger)
mpi_args = ['mpirun']
if os.path.exists(hostfile):
mpi_args += ['-hostfile', hostfile]
return mpi_args
def resolve_nested_variables(values):
def _replacement(name):
"""
        Get the appropriate value for a variable name: first search the
        environment and, if not found, fall back to the dotenv values.
"""
ret = os.getenv(name, values.get(name, ""))
return ret
def _re_sub_callback(match_object):
"""
        From a match object, extract the variable name and return
        the correct replacement.
"""
return _replacement(match_object.group()[2:-1])
for k, v in values.items():
values[k] = __posix_variable.sub(_re_sub_callback, v)
return values
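# Hedged sketch of how resolve_nested_variables() expands ${NAME} references;
# __posix_variable is assumed to be a module-level regex roughly like the one
# below (the real pattern lives in the original module), and the values are
# placeholders.
import os
import re

__posix_variable = re.compile(r'\$\{[^\}]*\}')   # assumed ${NAME} pattern

os.environ['HOME'] = '/home/user'
values = {'CACHE_DIR': '${HOME}/.cache', 'HOME': '/fallback'}
print(resolve_nested_variables(values))
# -> {'CACHE_DIR': '/home/user/.cache', 'HOME': '/fallback'}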