def doItConsolicious(opt):
    # reclaim stdout/stderr from log
    sys.stdout = sys.__stdout__
    sys.stderr = sys.__stderr__
    if opt['zipfile']:
        print 'Unpacking documentation...'
        for n in zipstream.unzipIter(opt['zipfile'], opt['ziptargetdir']):
            if n % 100 == 0:
                print n,
            if n % 1000 == 0:
                print
        print 'Done unpacking.'
    if opt['compiledir']:
        print 'Compiling to pyc...'
        import compileall
        compileall.compile_dir(opt["compiledir"])
        print 'Done compiling.'

Example source code for Python's compile_dir()

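Before the project-specific examples, here is a minimal, self-contained sketch of the call they all build on. The ./src path and the SystemExit handling are illustrative assumptions; force, quiet, and maxlevels are the standard compileall.compile_dir() keyword arguments that the snippets below use.

import compileall

# Minimal sketch (assumed paths): recursively byte-compile every .py file
# under ./src.  compile_dir() returns a true value only when every file
# compiled cleanly, so the result doubles as a success flag, the same
# pattern the handle() and _compile() examples below rely on.
ok = compileall.compile_dir(
    './src',        # directory tree to walk (placeholder)
    maxlevels=10,   # how many directory levels deep to recurse
    force=True,     # recompile even if existing bytecode looks up to date
    quiet=1,        # report errors only, not every file name
)
if not ok:
    raise SystemExit('some modules failed to byte-compile')
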
def test_compile_files(self):
    # Test compiling a single file, and complete directory
    for fn in (self.bc_path, self.bc_path2):
        try:
            os.unlink(fn)
        except:
            pass
    compileall.compile_file(self.source_path, force=False, quiet=True)
    self.assertTrue(os.path.isfile(self.bc_path) and
                    not os.path.isfile(self.bc_path2))
    os.unlink(self.bc_path)
    compileall.compile_dir(self.directory, force=False, quiet=True)
    self.assertTrue(os.path.isfile(self.bc_path) and
                    os.path.isfile(self.bc_path2))
    os.unlink(self.bc_path)
    os.unlink(self.bc_path2)

def test_compile_files(self):
    # Test compiling a single file, and complete directory
    for fn in (self.bc_path, self.bc_path2):
        try:
            os.unlink(fn)
        except:
            pass
    compileall.compile_file(self.source_path, force=False, quiet=True)
    self.assertTrue(os.path.isfile(self.bc_path) \
                    and not os.path.isfile(self.bc_path2))
    os.unlink(self.bc_path)
    compileall.compile_dir(self.directory, force=False, quiet=True)
    self.assertTrue(os.path.isfile(self.bc_path) \
                    and os.path.isfile(self.bc_path2))
    os.unlink(self.bc_path)
    os.unlink(self.bc_path2)

def handle(self, que_only=False, **options):
    if que_only:
        target_folders = [self._path(self.PROJECT_DIR, i) for i in ('envs', 'core', 'que')]
    else:
        target_folders = [self.PROJECT_DIR]
    quiet = int(int(options.get('verbosity', self.default_verbosity)) <= self.default_verbosity)
    for folder in target_folders:
        self.display('Byte-compiling all modules in %s' % folder, color='white')
        rc = compile_dir(folder, maxlevels=20, quiet=quiet)
        if rc:
            self.display('Byte-compiled all modules in %s' % folder, color='green')
        else:
            self.display('Error while byte-compiling some modules in %s' % folder, color='yellow')

def _compile(self):
    """
    Byte-compile all modules and packages.
    """
    self.log('\n# Byte-compiling all *.py files...')
    quiet = self.verbosity <= 1
    valid = compileall.compile_dir(self._root, quiet=quiet, force=True)
    if not valid:
        self.log('\n FAIL: Compilation error(s)\n')
        self._success = False

def recreation_check(self, metadata):
    """Check that compileall recreates bytecode when the new metadata is
    used."""
    if not hasattr(os, 'stat'):
        return
    py_compile.compile(self.source_path)
    self.assertEqual(*self.data())
    with open(self.bc_path, 'rb') as file:
        bc = file.read()[len(metadata):]
    with open(self.bc_path, 'wb') as file:
        file.write(metadata)
        file.write(bc)
    self.assertNotEqual(*self.data())
    compileall.compile_dir(self.directory, force=False, quiet=True)
    self.assertTrue(*self.data())

def test_optimize(self):
    # make sure compiling with different optimization settings than the
    # interpreter's creates the correct file names
    optimize = 1 if __debug__ else 0
    compileall.compile_dir(self.directory, quiet=True, optimize=optimize)
    cached = imp.cache_from_source(self.source_path,
                                   debug_override=not optimize)
    self.assertTrue(os.path.isfile(cached))
    cached2 = imp.cache_from_source(self.source_path2,
                                    debug_override=not optimize)
    self.assertTrue(os.path.isfile(cached2))
    cached3 = imp.cache_from_source(self.source_path3,
                                    debug_override=not optimize)
    self.assertTrue(os.path.isfile(cached3))

def test_error(self):
    try:
        orig_stdout = sys.stdout
        sys.stdout = io.TextIOWrapper(io.BytesIO(), encoding='ascii')
        compileall.compile_dir(self.directory)
    finally:
        sys.stdout = orig_stdout

def recreation_check(self, metadata):
    """Check that compileall recreates bytecode when the new metadata is
    used."""
    py_compile.compile(self.source_path)
    self.assertEqual(*self.data())
    with open(self.bc_path, 'rb') as file:
        bc = file.read()[len(metadata):]
    with open(self.bc_path, 'wb') as file:
        file.write(metadata)
        file.write(bc)
    self.assertNotEqual(*self.data())
    compileall.compile_dir(self.directory, force=False, quiet=True)
    self.assertTrue(*self.data())

def create_virtualenv(bundle_path, requirements_path):
    scripts_path = os.path.join(bundle_path, 'Contents', 'Scripts')
    virtualenv.create_environment(scripts_path, site_packages=True)
    virtualenv.make_environment_relocatable(scripts_path)
    pip.main(['install', '--prefix', scripts_path, '-r', requirements_path])
    compileall.compile_dir(scripts_path, maxlevels=0)

def test_optimize(self):
    # make sure compiling with different optimization settings than the
    # interpreter's creates the correct file names
    optimize = 1 if __debug__ else 0
    compileall.compile_dir(self.directory, quiet=True, optimize=optimize)
    cached = importlib.util.cache_from_source(self.source_path,
                                              debug_override=not optimize)
    self.assertTrue(os.path.isfile(cached))
    cached2 = importlib.util.cache_from_source(self.source_path2,
                                               debug_override=not optimize)
    self.assertTrue(os.path.isfile(cached2))
    cached3 = importlib.util.cache_from_source(self.source_path3,
                                               debug_override=not optimize)
    self.assertTrue(os.path.isfile(cached3))

def test_alwaysPreferPy(self):
    """
    Verify that .py files will always be preferred to .pyc files, regardless of
    directory listing order.
    """
    mypath = FilePath(self.mktemp())
    mypath.createDirectory()
    pp = modules.PythonPath(sysPath=[mypath.path])
    originalSmartPath = pp._smartPath
    def _evilSmartPath(pathName):
        o = originalSmartPath(pathName)
        originalChildren = o.children
        def evilChildren():
            # normally this order is random; let's make sure it always
            # comes up .pyc-first.
            x = list(originalChildren())
            x.sort()
            x.reverse()
            return x
        o.children = evilChildren
        return o
    mypath.child("abcd.py").setContent(b'\n')
    compileall.compile_dir(mypath.path, quiet=True)
    # sanity check
    self.assertEqual(len(list(mypath.children())), 2)
    pp._smartPath = _evilSmartPath
    self.assertEqual(pp['abcd'].filePath,
                     mypath.child('abcd.py'))

def test_freshPyReplacesStalePyc(self):
    """
    Verify that if a stale .pyc file on the PYTHONPATH is replaced by a
    fresh .py file, the plugins in the new .py are picked up rather than
    the stale .pyc, even if the .pyc is still around.
    """
    mypath = self.appPackage.child("stale.py")
    mypath.setContent(pluginFileContents('one'))
    # Make it super stale
    x = time.time() - 1000
    os.utime(mypath.path, (x, x))
    pyc = mypath.sibling('stale.pyc')
    # compile it
    if _PY3:
        # On python 3, don't use the __pycache__ directory; the intention
        # of scanning for .pyc files is for configurations where you want
        # to intentionally include them, which means we _don't_ scan for
        # them inside cache directories.
        extra = dict(legacy=True)
    else:
        # On python 2 this option doesn't exist.
        extra = dict()
    compileall.compile_dir(self.appPackage.path, quiet=1, **extra)
    os.utime(pyc.path, (x, x))
    # Eliminate the other option.
    mypath.remove()
    # Make sure it's the .pyc path getting cached.
    self.resetEnvironment()
    # Sanity check.
    self.assertIn('one', self.getAllPlugins())
    self.failIfIn('two', self.getAllPlugins())
    self.resetEnvironment()
    mypath.setContent(pluginFileContents('two'))
    self.failIfIn('one', self.getAllPlugins())
    self.assertIn('two', self.getAllPlugins())