def __loadTkdnd(self):
    """Lazily load the bundled tkdnd2.8 library and attach a TkDND handler.

    On first call, points TKDND_LIBRARY at the library shipped inside the
    appJar package and wraps the top-level window in a TkDND instance
    (stored on ``self.dnd``). On any failure the module-level ``TkDND``
    sentinel is set to False so the load is not retried.
    """
    global TkDND
    if TkDND is None:
        try:
            # tkdnd expects its Tcl package location via this env variable.
            tkdndlib = os.path.join(os.path.dirname(os.path.abspath(__file__)), "lib", "tkdnd2.8")
            os.environ['TKDND_LIBRARY'] = tkdndlib
            from appJar.lib.TkDND_wrapper import TkDND
            self.dnd = TkDND(self.topLevel)
        except Exception:
            # Was a bare ``except:`` — narrowed so KeyboardInterrupt/SystemExit
            # are not swallowed. DnD support is best-effort; mark unavailable.
            TkDND = False
# function to receive DnD events
# Python __file__ usage examples (collected source snippets)
def _read_httplib2_default_certs():
    """Locate and read the CA-certificate bundle shipped inside httplib2."""
    # An ImportError should not happen here and is handled by the outer caller.
    import httplib2
    package_dir = os.path.dirname(os.path.abspath(httplib2.__file__))
    return _read_pem_file(os.path.join(package_dir, HTTPLIB2_CA_CERT_FILE_NAME))
def _get_temp_cert_file_dir():
import __main__
app_root = op.dirname(op.dirname(op.abspath(__main__.__file__)))
temp_dir = op.join(app_root, 'temp_certs')
if not op.isdir(temp_dir):
try:
os.mkdir(temp_dir)
except:
pass
for candidate in ['temp_certs', 'local', 'default']:
dir_path = op.join(app_root, candidate)
if op.isdir(dir_path):
return dir_path
return app_root
def _read_httplib2_default_certs():
    """Return the contents of httplib2's bundled default CA certificate file."""
    import httplib2  # import error should not happen here; well handled by outer caller
    bundle_path = os.path.join(
        os.path.dirname(os.path.abspath(httplib2.__file__)),
        HTTPLIB2_CA_CERT_FILE_NAME,
    )
    return _read_pem_file(bundle_path)
def _get_temp_cert_file_dir():
import __main__
app_root = op.dirname(op.dirname(op.abspath(__main__.__file__)))
temp_dir = op.join(app_root, 'temp_certs')
if not op.isdir(temp_dir):
try:
os.mkdir(temp_dir)
except:
pass
for candidate in ['temp_certs', 'local', 'default']:
dir_path = op.join(app_root, candidate)
if op.isdir(dir_path):
return dir_path
return app_root
def newfile(prefix, idstring, colnames=None, mypath='./', usedate=True, usefolder=True):
    """Create a measurement output file (and optionally a folder) and return the open handle.

    Args:
        prefix: First component of the folder/file name.
        idstring: Identifier appended to the name.
        colnames: Optional list of column names, written as a '#'-prefixed header line.
        mypath: Base directory in which the folder/file is created.
        usedate: If True, embed the current date and time in the name.
        usefolder: If True, create a dedicated sub-folder and copy the running
            script into it (for provenance); otherwise write directly into mypath.

    Returns:
        The file object opened for writing; the caller must close it.
    """
    import __main__
    # Path of the top-level script, if any (interactive sessions have no __file__).
    mainfile = getattr(__main__, '__file__', None)
    print(mainfile)
    now = datetime.datetime.now()
    # Same formats as before: YYYY_MM_DD and HH.MM.SS with zero padding.
    datecode = '%d_%02d_%02d' % (now.year, now.month, now.day)
    timecode = '%02d.%02d.%02d' % (now.hour, now.minute, now.second)
    if usedate:
        foldername = prefix + '_' + datecode + '_' + timecode + '_' + idstring
    else:
        foldername = prefix + '_' + idstring
    filename = foldername + '.dat'
    # os.path.join instead of manual '/'-concatenation; normpath kept for
    # identical output on inputs with trailing separators.
    if usefolder:
        fullfoldername = os.path.normpath(os.path.join(mypath, foldername))
        fullfilename = os.path.normpath(os.path.join(mypath, foldername, filename))
    else:
        fullfoldername = os.path.normpath(mypath)
        fullfilename = os.path.normpath(os.path.join(mypath, filename))
    print(fullfoldername)
    print(fullfilename)
    if not os.path.exists(fullfoldername):
        os.makedirs(fullfoldername)
    print('Measurement Name: ', foldername)
    if mainfile is not None and usefolder:
        # Copy the driving script next to the data for reproducibility.
        scriptname = os.path.basename(mainfile)
        shutil.copyfile(mainfile, os.path.join(fullfoldername, scriptname))
    myfile = open(fullfilename, 'w')
    if colnames is not None:
        myfile.write('#' + ', '.join(colnames) + '\n')
    return myfile
def compute_function_id(func_name, func):
    """Compute an function ID for a function.
    Args:
        func_name: The name of the function (this includes the module name plus
            the function name).
        func: The actual function.
    Returns:
        This returns the function ID.
    """
    hasher = hashlib.sha1()
    # The function name always contributes to the hash.
    hasher.update(func_name.encode("ascii"))
    # Source code is only retrievable when running a script or inside IPython;
    # a plain interpreter session or a built-in (e.g., Cython) has none, so
    # it is skipped in those cases.
    import __main__ as main
    have_source = (hasattr(main, "__file__") or in_ipython()) and inspect.isfunction(func)
    if have_source:
        hasher.update(inspect.getsource(func).encode("ascii"))
    digest = hasher.digest()
    assert len(digest) == 20
    return FunctionID(digest)
def print_usage():
    """Print the command-line help text, listing all predefined build configurations."""
    script_name = os.path.basename(__file__)
    configs = []
    for name, args in sorted(CONFIGS.items()):
        # Tag the entries that are chosen implicitly (plain call / --debug).
        if name == DEFAULT_CONFIG_NAME:
            name += " (default)"
        if name == DEBUG_CONFIG_NAME:
            name += " (default with --debug)"
        configs.append(name + "\n " + " ".join(args))
    configs_string = "\n ".join(configs)
    cmake_name = os.path.basename(CMAKE)
    make_name = os.path.basename(MAKE)
    generator_name = CMAKE_GENERATOR.lower()
    default_config_name = DEFAULT_CONFIG_NAME
    debug_config_name = DEBUG_CONFIG_NAME
    # NOTE: .format(**locals()) below depends on the exact local variable
    # names defined above — do not rename them.
    print("""Usage: {script_name} [BUILD [BUILD ...]] [--all] [--debug] [MAKE_OPTIONS]
Build one or more predefined build configurations of Fast Downward. Each build
uses {cmake_name} to generate {generator_name} and then uses {make_name} to compile the
code. Build configurations differ in the parameters they pass to {cmake_name}.
Build configurations
{configs_string}
--all Alias to build all build configurations.
--debug Alias to build the default debug build configuration.
--help Print this message and exit.
Make options
All other parameters are forwarded to {make_name}.
Example usage:
./{script_name} -j4 # build {default_config_name} in 4 threads
./{script_name} -j4 downward # as above, but only build the planner
./{script_name} debug32 -j4 # build debug32 in 4 threads
./{script_name} --debug -j4 # build {debug_config_name} in 4 threads
./{script_name} release64 debug64 # build both 64-bit build configs
./{script_name} --all VERBOSE=true # build all build configs with detailed logs
""".format(**locals()))
def get_project_root_path():
    """Return the directory containing the process's entry-point script."""
    main_module = __import__('__main__')
    return os.path.dirname(main_module.__file__)
def mail_on_exception():
    """Install a sys.excepthook that mails the traceback (when notifications
    are enabled) and then exits with status 1."""
    import config
    import mail

    def excepthook(etype, value, tb):
        # Name of the failing script; fall back when __main__ has no __file__.
        try:
            script_name = pathlib.Path(__main__.__file__).name
        except AttributeError:
            script_name = 'script'
        if etype.__module__ == 'builtins':
            ename = etype.__name__
        else:
            ename = etype.__module__ + '.' + etype.__name__
        subject = f'[SNH48Live] {script_name} failed with {ename}'
        emsg = ''.join(traceback.format_exception(etype, value, tb))
        if config.main.notifications:
            mail.send_mail(subject, emsg, config.main.mailto)
        sys.stderr.write(emsg)
        sys.exit(1)

    if config.main.notifications:
        mail.init_gmail_client()
    sys.excepthook = excepthook
def print_usage():
    """Print the command-line help text, listing all predefined build configurations."""
    script_name = os.path.basename(__file__)
    configs = []
    for name, args in sorted(CONFIGS.items()):
        # Tag the entries that are chosen implicitly (plain call / --debug).
        if name == DEFAULT_CONFIG_NAME:
            name += " (default)"
        if name == DEBUG_CONFIG_NAME:
            name += " (default with --debug)"
        configs.append(name + "\n " + " ".join(args))
    configs_string = "\n ".join(configs)
    cmake_name = os.path.basename(CMAKE)
    make_name = os.path.basename(MAKE)
    generator_name = CMAKE_GENERATOR.lower()
    default_config_name = DEFAULT_CONFIG_NAME
    debug_config_name = DEBUG_CONFIG_NAME
    # NOTE: .format(**locals()) below depends on the exact local variable
    # names defined above — do not rename them.
    print("""Usage: {script_name} [BUILD [BUILD ...]] [--all] [--debug] [MAKE_OPTIONS]
Build one or more predefined build configurations of Fast Downward. Each build
uses {cmake_name} to generate {generator_name} and then uses {make_name} to compile the
code. Build configurations differ in the parameters they pass to {cmake_name}.
Build configurations
{configs_string}
--all Alias to build all build configurations.
--debug Alias to build the default debug build configuration.
--help Print this message and exit.
Make options
All other parameters are forwarded to {make_name}.
Example usage:
./{script_name} -j4 # build {default_config_name} in 4 threads
./{script_name} -j4 downward # as above, but only build the planner
./{script_name} debug32 -j4 # build debug32 in 4 threads
./{script_name} --debug -j4 # build {debug_config_name} in 4 threads
./{script_name} release64 debug64 # build both 64-bit build configs
./{script_name} --all VERBOSE=true # build all build configs with detailed logs
""".format(**locals()))
def get_project_root_path():
    """Return the folder that holds the running ``__main__`` script."""
    import __main__ as entry
    return os.path.dirname(entry.__file__)
def run_script(args):
    """Execute the target script as if it were the main program.

    Patches ``sys.argv`` and ``__main__.__file__`` first, then runs the
    compiled code inside ``__main__``'s namespace, printing tracebacks so
    that this wrapper's own frames are hidden.
    """
    sys.argv = [args.script] + args.script_arguments
    path = args.script
    __main__.__file__ = path
    try:
        code = get_code(path)
    except Exception as e:
        # Load/compile failure: report the error without a traceback.
        traceback.print_exception(e.__class__, e, None, file=sys.stderr)
    else:
        try:
            exec(code, __main__.__dict__)
        except BaseException as e:
            if not sys.flags.inspect and isinstance(e, SystemExit):
                # Let a normal SystemExit propagate unless -i (inspect) is on.
                raise
            elif PY2:  # Python 2 produces tracebacks in mixed encoding (!)
                # tb.tb_next skips this wrapper's frame so the traceback
                # starts inside the executed script.
                etype, e, tb = sys.exc_info()
                for line in traceback.format_exception(etype, e, tb.tb_next):
                    line = line.decode("utf-8", "replace")
                    try:
                        sys.stderr.write(line)
                    except UnicodeEncodeError:
                        # stderr can't represent the char: escape instead.
                        line = line.encode(sys.stderr.encoding, "backslashreplace")
                        sys.stderr.write(line)
                sys.stderr.flush()  # is this needed?
            else:  # PY3
                traceback.print_exception(e.__class__, e, e.__traceback__.tb_next, file=sys.stderr)
def get_default_config():
    """Build the default configuration dictionary for the lgsm wrapper.

    Many values contain ``%(key)s``-style references that are resolved later
    by configparser-style interpolation.

    Returns:
        dict: default configuration values.
    """
    root_dir = os.path.expanduser("~")
    script_instance_path = os.path.join(root_dir, "lgsm-core")
    script_game_path = os.path.realpath(script_instance_path)
    # When this module is imported (not run directly), use the path of the
    # actual entry-point script instead. BUG FIX: the original referenced the
    # undefined name ``main`` here and the resulting NameError was silently
    # swallowed by a bare ``except``, so this branch never took effect.
    if __name__ != "__main__":
        import __main__
        script_instance_path = getattr(__main__, "__file__", script_instance_path)
    date_format = "%Y-%m-%d-%H-%M-%S"
    date_string = datetime.datetime.today().strftime(date_format)
    config = {
        "lgsm_script": "lgsm-core",
        "date_format": date_format,
        "date_string": date_string,
        "root_dir": root_dir,
        "platform": "steam",
        "lgsm_dir": "%(root_dir)s/.lgsm",
        "lgsm_branch": "%(github_branch)s",
        "lgsm_repo": "lgsm-python",
        "lgsm_user": "%(github_user)s",
        "github_update": True,
        "github_user": "jaredballou",
        "github_branch": "master",
        "script_cfg": "%(lgsm_dir)s/config",
        "gamedata_dir": "%(lgsm_dir)s/gamedata",
        "gamedata_repo": "lgsm-gamedata",
        "gamedata_user": "%(github_user)s",
        "gamedata_branch": "%(github_branch)s",
        "script_game": os.path.basename(script_game_path),
        "script_game_path": script_game_path,
        "script_game_cfg_dir": "%(lgsm_dir)s/config/%(script_game)s",
        "script_instance": os.path.basename(script_instance_path),
        "script_instance_path": script_instance_path,
        "script_instance_cfg": "%(script_game_cfg_dir)s/%(script_instance)s",
    }
    return config
def __setup_working_dir__(self):
    """Prepare the timestamped working directory for a run.

    Creates the results and tensorboard sub-directories, creates a PLOT_ON
    control file, removes any stale ABORT_RUN kill file, fixes group
    permissions, and archives the current script directory plus the TeLL
    library into 00-script.zip inside the working directory.

    Returns:
        list: [working_dir, results_path, tensorboard, kill_file, plot_file, None]
    """
    # fix permissions of workspace root
    make_sure_path_exists(self.workspace)
    try:
        chmod(self.workspace, 0o775)
    except PermissionError:
        print("PermissionError when trying to change permissions of workspace to 775")
    # setup working directory: <workspace>/<specs>/<timestamp>
    specs_dir = os.path.realpath("{}/{}".format(self.workspace, self.specs))
    working_dir = os.path.realpath("{}/{}".format(specs_dir, self.timestamp))
    # Set up result folder structure
    # NOTE(review): the second format() argument is unused in the next two
    # calls (the format string has only one placeholder).
    results_path = "{}/results".format(working_dir, self.timestamp)
    make_sure_path_exists(results_path)
    # Set up tensorboard directory
    tensorboard = "{}/tensorboard".format(working_dir, self.timestamp)
    make_sure_path_exists(tensorboard)
    # set path to kill file (if this file exists abort run)
    kill_file_name = "ABORT_RUN"
    kill_file = os.path.join(working_dir, kill_file_name)
    # create plot file to plot by default
    plot_file_name = "PLOT_ON"
    plot_file = os.path.join(working_dir, plot_file_name)
    touch(plot_file)
    # remove kill file before starting the run (should not exist anyway)
    if os.path.isfile(kill_file):
        os.remove(kill_file)
    # fix permissions to grant group write access (to allow kill_file creation and plot control)
    try:
        chmod(self.workspace, 0o775, recursive=False)
        chmod(specs_dir, 0o775, recursive=False)
        chmod(working_dir, 0o775, recursive=True)
        chmod(plot_file, 0o664)
    except PermissionError:
        print("PermissionError when trying to change permissions of workspace to 775")
    # compress and copy current script and dependencies to results dir
    command = " ".join(sys.argv)
    # copy current code to temp dir (excluding workspace, VCS and IDE folders)
    script_dir = os.path.dirname(os.path.realpath(__main__.__file__))
    tempdir = tempfile.mkdtemp("tell")
    copydir(script_dir, tempdir,
            exclude=[self.workspace, os.path.join(script_dir, ".git"), os.path.join(script_dir, ".idea"),
                     os.path.join(script_dir, "__pycache__")])
    # also copy currently used TeLL library so it can be used for resuming runs
    copydir(TeLL.__path__[0], os.path.join(tempdir, os.path.basename(TeLL.__path__[0])))
    rmdir(os.path.join(os.path.join(tempdir, os.path.basename(TeLL.__path__[0])), "__pycache__"))
    # the original command line is stored as the zip's info string
    zipdir(dir=tempdir, zip=os.path.join(working_dir, '00-script.zip'), info=command,
           exclude=[self.workspace, '.git'])
    rmdir(tempdir)
    return [working_dir, results_path, tensorboard, kill_file, plot_file, None]
def assertExpected(self, s, subname=None):
    """
    Test that a string matches the recorded contents of a file
    derived from the name of this test and subname. This file
    is placed in the 'expect' directory in the same directory
    as the test script. You can automatically update the recorded test
    output using --accept.
    If you call this multiple times in a single function, you must
    give a unique subname each time.
    """
    if not (isinstance(s, str) or (sys.version_info[0] == 2 and isinstance(s, unicode))):
        raise TypeError("assertExpected is strings only")

    # Strip the "__main__." prefix added when the test module is run
    # directly, so the expect-file name is stable across invocation styles.
    def remove_prefix(text, prefix):
        if text.startswith(prefix):
            return text[len(prefix):]
        return text
    munged_id = remove_prefix(self.id(), "__main__.")
    # NB: we take __file__ from __main__, so we place the expect directory
    # where the test script lives, NOT where test/common.py lives. This
    # doesn't matter in PyTorch where all test scripts are in the same
    # directory as test/common.py, but it matters in onnx-pytorch
    expected_file = os.path.join(os.path.dirname(os.path.realpath(__main__.__file__)),
                                 "expect",
                                 munged_id)
    if subname:
        expected_file += "-" + subname
    expected_file += ".expect"
    expected = None

    # Overwrite the expect file with the current output (--accept workflow).
    def accept_output(update_type):
        print("Accepting {} for {}:\n\n{}".format(update_type, munged_id, s))
        with open(expected_file, 'w') as f:
            f.write(s)

    try:
        with open(expected_file) as f:
            expected = f.read()
    except IOError as e:
        # Only a missing expect file is acceptable; re-raise anything else.
        if e.errno != errno.ENOENT:
            raise
        elif ACCEPT:
            return accept_output("output")
        else:
            raise RuntimeError(
                ("I got this output for {}:\n\n{}\n\n"
                 "No expect file exists; to accept the current output, run:\n"
                 "python {} {} --accept").format(munged_id, s, __main__.__file__, munged_id))
    if ACCEPT:
        if expected != s:
            return accept_output("updated output")
    else:
        if hasattr(self, "assertMultiLineEqual"):
            # Python 2.7 only
            # NB: Python considers lhs "old" and rhs "new".
            self.assertMultiLineEqual(expected, s)
        else:
            self.assertEqual(s, expected)
def __init__(self, name, description, commandline_args=[]):
"""Command line arguments can be a list of shortcuts from
`predefined_args`, or a list of dictionaries. Arguments can also
be put in a file named SCRIPTNAME_args.py, e.g. `harvest_args.py`.
"""
self.parser = argparse.ArgumentParser(description)
# Add one ubiqitous command line arguments
commandline_args += ["loglevel"]
# Check for FILENAME_args.py file
import __main__
import os
try:
filename = os.path.basename(__main__.__file__)
filename = os.path.splitext(filename)[0]
args_from_file = __import__(filename + "_args")
commandline_args += args_from_file.args
except ImportError:
pass
# Add all the command line arguments
for c in commandline_args:
# cCheck for shortcuts used
if isinstance(c, str):
c = self.predefined_args[c]
self.parser.add_argument(
c.pop("short", None),
c.pop("long", None),
**c)
argcomplete.autocomplete(self.parser)
self.args = self.parser.parse_args()
self.logger = logging.getLogger(name)
# https://docs.python.org/2/library/logging.html#levels
self.logger.setLevel(self.args.loglevel * 10)
self.executionMode = self.NORMAL_MODE
# Convenience shortcuts to logger methods