def run(self):
# run original build code
build.run(self)
cmd = [
'make',
]
def compile():
call(cmd, cwd=os.path.join(BASEPATH, "_eSSP"))
self.execute(compile, [], 'Compiling library')
if not self.dry_run:
self.copy_file(os.path.join(BASEPATH, "_eSSP", "libessp.so"),
os.path.join(self.build_lib, "eSSP"))
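A run() override like this only takes effect once it is registered with setuptools. The following is a minimal sketch of that wiring, with hypothetical class and project names since the snippet does not show them:

# Sketch only: "CustomBuild" and "example" are hypothetical names.
import os
from subprocess import call
from distutils.command.build import build
from setuptools import setup

class CustomBuild(build):
    def run(self):
        build.run(self)  # run the standard build steps first
        self.execute(lambda: call(['make']), [], 'Compiling library')

setup(
    name='example',
    cmdclass={'build': CustomBuild},  # setuptools dispatches 'build' to run()
)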
def run(self):
_install.run(self)
print("Installing udev rules...")
if not os.path.isdir("/etc/udev/rules.d"):
print("WARNING: udev rules have not been installed (/etc/udev/rules.d is not a directory)")
return
try:
shutil.copy("./rivalcfg/data/99-steelseries-rival.rules", "/etc/udev/rules.d/")
except IOError:
print("WARNING: udev rules have not been installed (permission denied)")
return
try:
subprocess.call(["udevadm", "trigger"])
except OSError:
print("WARNING: unable to update udev rules, please run the 'udevadm trigger' command")
return
print("Done!")
def run(self):
install.run(self)
sys.path.reverse()
# ----------- install segmenter ------------
import stanford_segmenter
pwd = stanford_segmenter.__path__[0]
if not isdir(join(pwd, 'seg')):
print('Start downloading stanford-segmenter-2015-12-09.zip...')
urlretrieve('http://nlp.stanford.edu/software/stanford-segmenter-2015-12-09.zip', 'seg.zip', report)
with zipfile.ZipFile('seg.zip', 'r') as z:
z.extractall(pwd)
rename(join(pwd, 'stanford-segmenter-2015-12-09'), join(pwd, 'seg'))
unlink('seg.zip')
# ----------- install postagger ------------
import stanford_postagger
pwd = stanford_postagger.__path__[0]
if not isdir(join(pwd, 'pos')):
print('Start downloading stanford-postagger-full-2015-12-09.zip...')
urlretrieve('http://nlp.stanford.edu/software/stanford-postagger-full-2015-12-09.zip', 'pos.zip', report)
with zipfile.ZipFile('pos.zip', 'r') as z:
z.extractall(pwd)
rename(join(pwd, 'stanford-postagger-full-2015-12-09'), join(pwd, 'pos'))
unlink('pos.zip')
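The report argument handed to urlretrieve above is a progress callback that the snippet does not define. A minimal sketch of such a hook, using urlretrieve's standard (block_num, block_size, total_size) reporthook signature:

import sys

def report(block_num, block_size, total_size):
    # Hypothetical reporthook: print a rough download percentage.
    downloaded = block_num * block_size
    if total_size > 0:
        percent = min(100, downloaded * 100 // total_size)
        sys.stdout.write('\rDownloaded %d%%' % percent)
        sys.stdout.flush()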
def run(self):
install.run(self)
platform = self._get_platform()
library_full_path = self._get_install_full_path(
self._get_base_install_path(),
self._LIBRARY_NAME[platform][1])
get_latest_request = Request('https://github.com/yamachu/World/releases/latest',
headers={'Accept': 'application/json'})
get_latest_response = urlopen(get_latest_request)
response_str = get_latest_response.read().decode('utf-8')
response_json = json.loads(response_str)
latest_version = response_json['tag_name']
urlretrieve("{}/{}/{}".format(
self._DOWNLOAD_BASE_URL,
latest_version,
self._LIBRARY_NAME[platform][0]), library_full_path)
def run(self):
# Install all requirements
failed = []
for req in requirements:
if pip.main(["install", req]) == 1:
failed.append(req)
if len(failed) > 0:
print("")
print("Error installing the following packages:")
print(str(failed))
print("Please install them manually")
print("")
raise OSError("Aborting")
# install MlBox
install.run(self)
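pip.main() was removed from pip's public API in pip 10, so the loop above no longer works on current pip releases. A sketch of the commonly recommended alternative, invoking pip through the running interpreter:

import subprocess
import sys

def install_requirements(requirements):
    # Return the requirements that failed to install via 'python -m pip'.
    failed = []
    for req in requirements:
        if subprocess.call([sys.executable, '-m', 'pip', 'install', req]) != 0:
            failed.append(req)
    return failed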
def run(self):
if not os.path.exists("build"):
os.mkdir("build")
if os.path.exists(".git"):
try:
self.gen_authors()
self.gen_changelog()
sdist.run(self)
finally:
files = ["AUTHORS",
"ChangeLog"]
for item in files:
if os.path.exists(item):
os.unlink(item)
else:
sdist.run(self)
def update_version_py():
if not os.path.isdir(".git"):
print("This does not appear to be a Git repository.")
return
try:
p = subprocess.Popen(["git", "describe",
"--tags", "--always"],
stdout=subprocess.PIPE)
except EnvironmentError:
print("unable to run git, leaving eden/_version.py alone")
return
stdout = p.communicate()[0]
if p.returncode != 0:
print("unable to run git, leaving eden/_version.py alone")
return
ver = stdout.strip()
ver = str(int(ver, 16)) # PyPI doesn't like base 16
f = open("graphlearn/_version.py", "w")
f.write(VERSION_PY % ver)
f.close()
print("set graphlearn/_version.py to '%s'" % ver)
def run(self):
_install.run(self)
# Do what distutils install_data used to do... *sigh*
# Despite what the setuptools docs say, the omission of this
# in setuptools is a bug, not a feature.
print("== Installing Nautilus Python extension...")
src_file = "nautilus_terminal/nautilus_terminal_extension.py"
dst_dir = os.path.join(self.install_data, "share/nautilus-python/extensions")
self.mkpath(dst_dir)
dst_file = os.path.join(dst_dir, os.path.basename(src_file))
self.copy_file(src_file, dst_file)
print("== Done!")
print("== Installing GSettings Schema")
src_file = "./nautilus_terminal/schemas/org.flozz.nautilus-terminal.gschema.xml"
dst_dir = os.path.join(self.install_data, "share/glib-2.0/schemas")
self.mkpath(dst_dir)
dst_file = os.path.join(dst_dir, os.path.basename(src_file))
self.copy_file(src_file, dst_file)
print("== Done! Run 'glib-compile-schemas /usr/share/glib-2.0/schemas/' for a global installation to compile the schema.")
def run(self):
install.run(self)
WYRM_PATH = os.path.join(self.install_scripts, 'wyrm')
AIOWEB_SHARE = os.path.join(self.install_base, 'share/aioweb/')
print("creating %s" % WYRM_PATH)
if os.path.exists(WYRM_PATH):
os.unlink(WYRM_PATH)
shutil.copy2("bin/wyrm", self.install_scripts)
print("coping generators")
if os.path.exists(AIOWEB_SHARE):
shutil.rmtree(AIOWEB_SHARE)
os.mkdir(AIOWEB_SHARE)
shutil.copytree("generators", os.path.join(AIOWEB_SHARE, "generators"))
def run(self):
# run original build code
build.run(self)
# build samtools
build_path = os.path.abspath(self.build_temp)
cmd = ['make', '-C', 'external/samtools']
def compile():
subprocess.check_call(cmd)
self.execute(compile, [], 'Compile samtools')
def compile_htslib():
subprocess.check_call(['./configure'], cwd='external/samtools/htslib-1.2.1')
subprocess.check_call(['make'], cwd='external/samtools/htslib-1.2.1')
self.execute(compile_htslib, [], 'Compile htslib')
def run(self):
install.run(self)
instcmd = self.get_finalized_command('install')
root = instcmd.root
prefix = path.abspath(get_config_vars('prefix')[0])
data_dir = path.abspath(check_data_dir())
man_file = path.join(data_dir, "combirepo.1")
if not path.exists(man_file):
self.run_command('build_manpage')
man_path = path.abspath('{0}/{1}/share/man/man1/'.format(root, prefix))
if not path.exists(man_path):
makedirs(man_path)
print "Installing man page into {0}".format(man_path)
cmd = "bash -c 'gzip {0} \
&& install -m 0644 {0}.gz {1}/'".format(man_file, man_path)
args = shlex.split(cmd)
call(args)
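The gzip-and-install step above shells out to bash; a pure-Python sketch of the same step using only the standard library (not the project's actual code) could look like this:

import gzip
import os
import shutil

def install_man_page(man_file, man_path):
    # Gzip the man page and copy it into man_path with mode 0644.
    gz_file = man_file + '.gz'
    with open(man_file, 'rb') as src, gzip.open(gz_file, 'wb') as dst:
        shutil.copyfileobj(src, dst)
    dest = os.path.join(man_path, os.path.basename(gz_file))
    shutil.copy(gz_file, dest)
    os.chmod(dest, 0o644)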
def run(self):
# Make sure all modules are ready
build_cmd = self.get_finalized_command("build_py")
build_cmd.run()
# And make sure our scripts are ready
build_scripts_cmd = self.get_finalized_command("build_scripts")
build_scripts_cmd.run()
# make symlinks for test data
if build_cmd.build_lib != top_dir:
for path in ['testfiles.tar.gz', 'gnupg']:
src = os.path.join(top_dir, 'testing', path)
target = os.path.join(build_cmd.build_lib, 'testing', path)
try:
os.symlink(src, target)
except Exception:
pass
os.environ['PATH'] = "%s:%s" % (
os.path.abspath(build_scripts_cmd.build_dir),
os.environ.get('PATH'))
test.run(self)
def run(self):
# Normally, install will call build(). But we want to delete the
# testing dir between building and installing. So we manually build
# and mark ourselves to skip building when we run() for real.
self.run_command('build')
self.skip_build = True
# This should always be true, but just to make sure!
if self.build_lib != top_dir:
testing_dir = os.path.join(self.build_lib, 'testing')
os.system("rm -rf %s" % testing_dir)
install.run(self)
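The os.system("rm -rf ...") call above depends on a POSIX shell; a portable equivalent for the same cleanup, reusing the snippet's testing_dir, would be:

import shutil

# ignore_errors=True mirrors rm -rf's tolerance of a missing path.
shutil.rmtree(testing_dir, ignore_errors=True)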
# TODO: move logic from dist/makedist inline
def run(self):
if self.dry_run:
print("skipping data install")
return
if (self.survey is None) and (self.model is None):
self.tarball = self._tarball
self.dirname = self._dirname
super(IsochroneCommand,self).run()
return
for survey in self.surveys:
for model in self.models:
self.tarball = "ugali-%s-%s.tar.gz"%(survey,model)
self.dirname = "isochrones/%s/%s"%(survey,model)
super(IsochroneCommand,self).run()
def update_version_py():
if not os.path.isdir(".git"):
print("This does not appear to be a Git repository.")
return
try:
#p = subprocess.Popen(["git", "describe","--tags", "--always"],
# stdout=subprocess.PIPE)
p = subprocess.Popen("git rev-list HEAD --count".split(),
stdout=subprocess.PIPE)
except EnvironmentError:
print("unable to run git, leaving structout/_version.py alone")
return
stdout = p.communicate()[0]
if p.returncode != 0:
print("unable to run git, leaving structout/_version.py alone")
return
ver = "0.0."+stdout.strip()
#ver = str(int(ver,16)) # pypi doesnt like base 16
f = open("structout/_version.py", "w")
f.write(VERSION_PY % ver)
f.close()
print("set structout/_version.py to '%s'" % ver)
def run(self):
# Perform original install steps
install.run(self)
# Perform custom install steps
from license_identifier.license_identifier import LicenseIdentifier
license_dir = os.path.join(
self.install_lib, 'license_identifier/data/license_dir'
)
pickle_file_path = os.path.join(
self.install_lib,
'license_identifier/data/license_n_gram_lib.pickle'
)
LicenseIdentifier(license_dir=license_dir,
pickle_file_path=pickle_file_path)
def run(self):
default_site = 'codeforces'
cache_dir = os.path.join(os.path.expanduser('~'), '.cache', 'ACedIt')
from main import supported_sites
for site in supported_sites:
# create cache directory structure
if not os.path.isdir(os.path.join(cache_dir, site)):
os.makedirs(os.path.join(cache_dir, site))
data = {'default_site': default_site.strip(),
        'default_contest': None, 'cachedir': cache_dir}
with open(os.path.join(cache_dir, 'constants.json'), 'w') as f:
f.write(json.dumps(data, indent=2))
install.run(self)
def run(self):
install.run(self)
print "Installing auto completion of tsaotun to",
src = os.path.join(ROOT_DIR, 'completion', 'tsaotun')
sys = platform.system()
try:
if sys == 'Darwin':
dest = os.path.join(
os.popen('brew --prefix').read()[:-1], 'etc', 'bash_completion.d', 'tsaotun')
print(dest)
shutil.copy(src, dest)
elif sys == 'Linux':
dest = os.path.join(
'/etc', 'bash_completion.d', 'tsaotun')
print(dest)
shutil.copy(src, dest)
else: # Windows, etc.
print "... \n Warning: {} is currently not supported. Skipped.".format(sys)
except IOError:
print "Permission denied: You probably want to copy '{}' to '{}' manually.".format(src, dest)
print "Tsaotun is installed successfully."
def run(self):
self.run_command('build')
_install.run(self)
def run(self):
# run original install code
install.run(self)
# install library
self.copy_tree(self.build_lib, self.install_lib)
def run(self):
_pre_install()
install.run(self)
# FIXME: why is the egg missing without a manual install?
install.do_egg_install(self)
def run(self):
_pre_install()
develop.run(self)
def run(self, *args, **kwargs):
install.run(self, *args, **kwargs)
distcfg = os.path.join('girder_worker', 'worker.dist.cfg')
localcfg = os.path.join('girder_worker', 'worker.local.cfg')
if not os.path.isfile(localcfg):
print('Creating worker.local.cfg')
shutil.copyfile(distcfg, localcfg)
def run(cmd):
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if proc.returncode:
print(err)
sys.exit(1)
else:
return out
def initial_ave():
# create /etc/ave/user if it doesn't exist
import ave.config
try:
user = ave.config.create_etc()
except Exception as e:
print('ERROR: Installation failed: %s' % e)
return 1
# become the run-as user before checking/generating config files
try:
import ave.persona
home = ave.persona.become_user(user)
except Exception as e:
print('ERROR: Could not become user %s' % user)
return 2
# create the default AVE configuration
try:
ave.config.create_default(home)
except Exception as e:
print(
'ERROR: Could not create configuration files for %s: %s'
% (user, str(e))
)
return 3
def run(self):
install.run(self)
self.execute(post_install, [], msg="Running post install task")
def build_libfdtx():
root = here + '/common/src/libfdtx'
run(['make', '-C', root, 'clean'])
run(['make', '-C', root, 'libfdtx.so'])
return 'common/src/libfdtx/libfdtx.so'
# Build Galatea
def run(self):
self.run_command('build')
_install.run(self)
def run(self):
_install.run(self)
self.execute(_run_build_tables, (self.install_lib,),
msg="Build the lexing/parsing tables")
def run(self):
_install.do_egg_install(self)
spacy_download_en()
_install.run(self)