def initializePage(self):
    """Start the database import according to the wizard's chosen source.

    Three modes, selected by ``self.wizard.import_type``:

    - ``'directory'``: import directly from a local directory; the
      unarchive UI is hidden because no extraction happens.
    - ``'archive'``:   extract a user-supplied archive into a fresh
      temporary directory.
    - anything else:   download an archive into a temporary directory on
      a background thread, then extract it when the download finishes.
    """
    if self.wizard.import_type == 'directory':
        self.import_dir()
        # Plain directory import: nothing to extract, hide that UI.
        self.unarchive_label.hide()
        self.unarchive_progressbar.hide()
    elif self.wizard.import_type == 'archive':
        # Keep a handle on the TemporaryDirectory object so it is not
        # garbage-collected (and deleted) while the import runs.
        self.tempdir = tempfile.TemporaryDirectory()
        self.archivepath = self.field('archivepath')
        self.unarchive()
    else:
        # Download mode: reveal progress widgets and fetch the archive
        # asynchronously; unarchive() runs on download_complete.
        self.download_label.setVisible(True)
        self.download_progressbar.setVisible(True)
        self.unarchive_progressbar.setMaximum(1)
        self.tempdir = tempfile.TemporaryDirectory()
        self.archivepath = os.path.join(self.tempdir.name, 'db.7z')
        import_signals.download_complete.connect(self.unarchive)
        self.download_thread = DownloadThread(
            session, self.wizard.db_url, self.tempdir.name)
        # Also stop the worker thread once the download has finished.
        import_signals.download_complete.connect(self.download_thread.exit)
        self.download_thread.start()
# Example source code using Python's TemporaryDirectory() class
def test_zipfile_timestamp():
    # SOURCE_DATE_EPOCH should pin the timestamps recorded on the
    # entries of the generated zip. See issue #143. TemporaryDirectory is
    # not a context manager under Python 3, hence the helper used here.
    with temporary_directory() as tempdir:
        for filename in ('one', 'two', 'three'):
            target = os.path.join(tempdir, filename)
            with codecs.open(target, 'w', encoding='utf-8') as fp:
                fp.write(filename + '\n')
        zip_base_name = os.path.join(tempdir, 'dummy')
        # 315576060 is 1980-01-01, the earliest date a TarInfo can represent.
        with environ('SOURCE_DATE_EPOCH', '315576060'):
            zip_filename = wheel.archive.make_wheelfile_inner(
                zip_base_name, tempdir)
        with readable_zipfile(zip_filename) as zf:
            for info in zf.infolist():
                assert info.date_time[:3] == (1980, 1, 1)
def test_spearman(self):
    """Default (Spearman) correlation output should name the method,
    report the sample size and data, and emit no filtering warning."""
    alpha_div = pd.Series([2.0, 4.0, 6.0], name='alpha-div',
                          index=['sample1', 'sample2', 'sample3'])
    md = qiime2.Metadata(
        pd.DataFrame({'value': ['1.0', '2.0', '3.0']},
                     index=['sample1', 'sample2', 'sample3']))
    with tempfile.TemporaryDirectory() as output_dir:
        alpha_correlation(output_dir, alpha_div, md)
        index_fp = os.path.join(output_dir, 'index.html')
        self.assertTrue(os.path.exists(index_fp))
        jsonp_fp = os.path.join(output_dir, 'category-value.jsonp')
        self.assertTrue(os.path.exists(jsonp_fp))
        # Read once via a context manager: the original re-opened the
        # file for every assertion and never closed the handles.
        with open(jsonp_fp) as fh:
            jsonp_content = fh.read()
        self.assertTrue('Spearman' in jsonp_content)
        self.assertTrue('"sampleSize": 3' in jsonp_content)
        self.assertTrue('"data":' in jsonp_content)
        self.assertFalse('filtered' in jsonp_content)
def test_pearson(self):
    """method='pearson' should label the output 'Pearson' and otherwise
    match the Spearman report structure."""
    alpha_div = pd.Series([2.0, 4.0, 6.0], name='alpha-div',
                          index=['sample1', 'sample2', 'sample3'])
    md = qiime2.Metadata(
        pd.DataFrame({'value': ['1.0', '2.0', '3.0']},
                     index=['sample1', 'sample2', 'sample3']))
    with tempfile.TemporaryDirectory() as output_dir:
        alpha_correlation(output_dir, alpha_div, md, method='pearson')
        index_fp = os.path.join(output_dir, 'index.html')
        self.assertTrue(os.path.exists(index_fp))
        jsonp_fp = os.path.join(output_dir, 'category-value.jsonp')
        self.assertTrue(os.path.exists(jsonp_fp))
        # Read once via a context manager: the original re-opened the
        # file for every assertion and never closed the handles.
        with open(jsonp_fp) as fh:
            jsonp_content = fh.read()
        self.assertTrue('Pearson' in jsonp_content)
        self.assertTrue('"sampleSize": 3' in jsonp_content)
        self.assertTrue('"data":' in jsonp_content)
        self.assertFalse('filtered' in jsonp_content)
def test_alpha_group_significance(self):
    """A categorical column should produce a URL-encoded jsonp file and
    both Kruskal-Wallis sections in index.html."""
    alpha_div = pd.Series([2.0, 4.0, 6.0], name='alpha-div',
                          index=['sample1', 'sample2', 'sample3'])
    md = qiime2.Metadata(
        pd.DataFrame({'a or b': ['a', 'b', 'b']},
                     index=['sample1', 'sample2', 'sample3']))
    with tempfile.TemporaryDirectory() as output_dir:
        alpha_group_significance(output_dir, alpha_div, md)
        index_fp = os.path.join(output_dir, 'index.html')
        self.assertTrue(os.path.exists(index_fp))
        # The space in 'a or b' is percent-encoded in the filename.
        self.assertTrue(os.path.exists(
            os.path.join(output_dir,
                         'category-a%20or%20b.jsonp')))
        # Read once via a context manager instead of leaking a handle
        # per assertion as the original did.
        with open(index_fp) as fh:
            index_content = fh.read()
        self.assertTrue('Kruskal-Wallis (all groups)' in index_content)
        self.assertTrue('Kruskal-Wallis (pairwise)' in index_content)
def test_alpha_group_significance_some_numeric(self):
    """A numeric-looking column ('bad') should be skipped — no jsonp
    output — and flagged as not categorical in index.html."""
    alpha_div = pd.Series([2.0, 4.0, 6.0], name='alpha-div',
                          index=['sample1', 'sample2', 'sample3'])
    md = qiime2.Metadata(
        pd.DataFrame({'a or b': ['a', 'b', 'b'],
                      'bad': ['1.0', '2.0', '3.0']},
                     index=['sample1', 'sample2', 'sample3']))
    with tempfile.TemporaryDirectory() as output_dir:
        alpha_group_significance(output_dir, alpha_div, md)
        index_fp = os.path.join(output_dir, 'index.html')
        self.assertTrue(os.path.exists(index_fp))
        self.assertTrue(os.path.exists(
            os.path.join(output_dir,
                         'category-a%20or%20b.jsonp')))
        self.assertFalse(os.path.exists(
            os.path.join(output_dir,
                         'bad-value.jsonp')))
        # Read once via a context manager instead of leaking a handle
        # per assertion as the original did.
        with open(index_fp) as fh:
            index_content = fh.read()
        self.assertTrue('not categorical:' in index_content)
        self.assertTrue('<strong>bad' in index_content)
def test_alpha_group_significance_one_group_all_unique_values(self):
    """A column whose every value is unique ('bad') should be skipped
    and explained via the 'number of samples' message."""
    alpha_div = pd.Series([2.0, 4.0, 6.0], name='alpha-div',
                          index=['sample1', 'sample2', 'sample3'])
    md = qiime2.Metadata(
        pd.DataFrame({'a or b': ['a', 'b', 'b'],
                      'bad': ['x', 'y', 'z']},
                     index=['sample1', 'sample2', 'sample3']))
    with tempfile.TemporaryDirectory() as output_dir:
        alpha_group_significance(output_dir, alpha_div, md)
        index_fp = os.path.join(output_dir, 'index.html')
        self.assertTrue(os.path.exists(index_fp))
        self.assertTrue(os.path.exists(
            os.path.join(output_dir,
                         'category-a%20or%20b.jsonp')))
        self.assertFalse(os.path.exists(
            os.path.join(output_dir,
                         'category-bad.jsonp')))
        # Read once via a context manager instead of leaking a handle
        # per assertion as the original did.
        with open(index_fp) as fh:
            index_content = fh.read()
        self.assertTrue('number of samples' in index_content)
        self.assertTrue('<strong>bad' in index_content)
def test_alpha_group_significance_one_group_single_value(self):
    """A column with a single repeated value ('bad') should be skipped
    and explained via the 'only a single' message."""
    alpha_div = pd.Series([2.0, 4.0, 6.0], name='alpha-div',
                          index=['sample1', 'sample2', 'sample3'])
    md = qiime2.Metadata(
        pd.DataFrame({'a or b': ['a', 'b', 'b'],
                      'bad': ['x', 'x', 'x']},
                     index=['sample1', 'sample2', 'sample3']))
    with tempfile.TemporaryDirectory() as output_dir:
        alpha_group_significance(output_dir, alpha_div, md)
        index_fp = os.path.join(output_dir, 'index.html')
        self.assertTrue(os.path.exists(index_fp))
        self.assertTrue(os.path.exists(
            os.path.join(output_dir,
                         'category-a%20or%20b.jsonp')))
        self.assertFalse(os.path.exists(
            os.path.join(output_dir,
                         'category-bad.jsonp')))
        # Read once via a context manager instead of leaking a handle
        # per assertion as the original did.
        with open(index_fp) as fh:
            index_content = fh.read()
        self.assertTrue('only a single' in index_content)
        self.assertTrue('<strong>bad' in index_content)
def test_alpha_rarefaction_with_phylogeny_and_metadata(self):
    """Passing a phylogeny should add faith_pd alongside the default
    observed_otus and shannon metrics in the report."""
    t = biom.Table(np.array([[100, 111, 113], [111, 111, 112]]),
                   ['O1', 'O2'],
                   ['S1', 'S2', 'S3'])
    p = skbio.TreeNode.read(io.StringIO(
        '((O1:0.25, O2:0.50):0.25, O3:0.75)root;'))
    md = qiime2.Metadata(
        pd.DataFrame({'pet': ['russ', 'milo', 'peanut']},
                     index=['S1', 'S2', 'S3']))
    with tempfile.TemporaryDirectory() as output_dir:
        alpha_rarefaction(output_dir, t, max_depth=200, phylogeny=p,
                          metadata=md)
        index_fp = os.path.join(output_dir, 'index.html')
        self.assertTrue(os.path.exists(index_fp))
        # Read once via a context manager instead of leaking a handle
        # per assertion as the original did.
        with open(index_fp) as fh:
            index_content = fh.read()
        self.assertTrue('observed_otus' in index_content)
        self.assertTrue('shannon' in index_content)
        self.assertTrue('faith_pd' in index_content)
def test_simple(self):
    """_alpha_rarefaction_jsonp should write a jsonp file embedding the
    load_data callback plus the table's columns, index and data."""
    d = [[1.04, 1.5, 2., 2.5, 1.18, 2.82, 2.96, 3., 1, 3., 1., 'S1'],
         [1.04, 1.5, 2., 2.5, 1.18, 2.82, 2.96, 3., 1, 3., 1., 'S2'],
         [1.04, 1.5, 2., 2.5, 1.18, 2.82, 2.96, 3., 1, 3., 1., 'S3']]
    data = pd.DataFrame(data=d, columns=['2%', '25%', '50%', '75%', '9%',
                                         '91%', '98%', 'count', 'depth',
                                         'max', 'min', 'sample-id'])
    with tempfile.TemporaryDirectory() as output_dir:
        _alpha_rarefaction_jsonp(output_dir, 'peanut.jsonp', 'shannon',
                                 data, '')
        jsonp_fp = os.path.join(output_dir, 'peanut.jsonp')
        self.assertTrue(os.path.exists(jsonp_fp))
        # Close the handle deterministically; the original left the
        # open file to the garbage collector.
        with open(jsonp_fp) as fh:
            jsonp_content = fh.read()
        self.assertTrue('load_data' in jsonp_content)
        self.assertTrue('columns' in jsonp_content)
        self.assertTrue('index' in jsonp_content)
        self.assertTrue('data' in jsonp_content)
        self.assertTrue('sample-id' in jsonp_content)
        self.assertTrue('shannon' in jsonp_content)
def test_bioenv(self):
    """bioenv should use the numeric column, flag the non-numeric one,
    and emit no warning for a well-formed input."""
    dm = skbio.DistanceMatrix([[0.00, 0.25, 0.25],
                               [0.25, 0.00, 0.00],
                               [0.25, 0.00, 0.00]],
                              ids=['sample1', 'sample2', 'sample3'])
    md = qiime2.Metadata(
        pd.DataFrame([['1.0', 'a'], ['2.0', 'b'], ['3.0', 'c']],
                     index=['sample1', 'sample2', 'sample3'],
                     columns=['metadata1', 'metadata2']))
    with tempfile.TemporaryDirectory() as output_dir:
        bioenv(output_dir, dm, md)
        index_fp = os.path.join(output_dir, 'index.html')
        self.assertTrue(os.path.exists(index_fp))
        # Read once via a context manager instead of leaking a handle
        # per assertion as the original did.
        with open(index_fp) as fh:
            index_content = fh.read()
        self.assertTrue('metadata1' in index_content)
        self.assertTrue('not numerical' in index_content)
        self.assertTrue('<strong>metadata2' in index_content)
        self.assertFalse('Warning' in index_content)
def test_bioenv_extra_metadata(self):
    """Metadata rows absent from the distance matrix (sample4) should be
    ignored without producing a warning."""
    dm = skbio.DistanceMatrix([[0.00, 0.25, 0.25],
                               [0.25, 0.00, 0.00],
                               [0.25, 0.00, 0.00]],
                              ids=['sample1', 'sample2', 'sample3'])
    md = qiime2.Metadata(
        pd.DataFrame([['1.0', 'a'], ['2.0', 'b'], ['3.0', 'c'],
                      ['4.0', 'd']],
                     index=['sample1', 'sample2', 'sample3', 'sample4'],
                     columns=['metadata1', 'metadata2']))
    with tempfile.TemporaryDirectory() as output_dir:
        bioenv(output_dir, dm, md)
        index_fp = os.path.join(output_dir, 'index.html')
        self.assertTrue(os.path.exists(index_fp))
        # Read once via a context manager instead of leaking a handle
        # per assertion as the original did.
        with open(index_fp) as fh:
            index_content = fh.read()
        self.assertTrue('metadata1' in index_content)
        self.assertTrue('not numerical' in index_content)
        self.assertTrue('<strong>metadata2' in index_content)
        self.assertFalse('Warning' in index_content)
def test_bioenv_zero_variance_column(self):
    """A constant column (metadata2) should be flagged as having no
    variance rather than used in the analysis."""
    dm = skbio.DistanceMatrix([[0.00, 0.25, 0.25],
                               [0.25, 0.00, 0.00],
                               [0.25, 0.00, 0.00]],
                              ids=['sample1', 'sample2', 'sample3'])
    md = qiime2.Metadata(
        pd.DataFrame([['1.0', '2.0'], ['2.0', '2.0'], ['3.0', '2.0']],
                     index=['sample1', 'sample2', 'sample3'],
                     columns=['metadata1', 'metadata2']))
    with tempfile.TemporaryDirectory() as output_dir:
        bioenv(output_dir, dm, md)
        index_fp = os.path.join(output_dir, 'index.html')
        # Check the file exists first (the sibling bioenv tests do this;
        # it was missing here).
        self.assertTrue(os.path.exists(index_fp))
        # Read once via a context manager instead of leaking a handle
        # per assertion as the original did.
        with open(index_fp) as fh:
            index_content = fh.read()
        self.assertTrue('metadata1' in index_content)
        self.assertTrue('no variance' in index_content)
        self.assertTrue('<strong>metadata2' in index_content)
        self.assertFalse('Warning' in index_content)
def run():
    """Download every symbol URL listed in URLS and bundle the
    successful downloads into symbols-for-systemtests.zip."""
    urls = []
    for line in URLS.strip().splitlines():
        line = line.strip()
        # Skip blank lines and '#' comment lines.
        if line and not line.startswith('#'):
            urls.append(line)
    with tempfile.TemporaryDirectory(prefix='symbols') as tmpdirname:
        downloaded = download_all(urls, tmpdirname)
        save_filepath = 'symbols-for-systemtests.zip'
        total_time_took = 0.0
        total_size = 0
        with zipfile.ZipFile(save_filepath, mode='w') as zf:
            for uri, (fullpath, time_took, size) in downloaded.items():
                total_time_took += time_took
                total_size += size
                if not fullpath:
                    continue  # download failed for this uri
                arcname = uri.replace('v1/', '')
                assert os.path.isfile(fullpath)
                zf.write(
                    fullpath,
                    arcname=arcname,
                    compress_type=zipfile.ZIP_DEFLATED,
                )
def make_tempdir(prefix=None, suffix=None):
    """Decorator that appends a temporary-directory path as the last
    positional argument of the wrapped function.

    The directory is created just before each call and removed as soon
    as the function returns.

    Usage::

        @make_tempdir()
        def some_function(arg1, arg2, tempdir, kwargs1='one'):
            assert os.path.isdir(tempdir)
            ...
    """
    def decorator(func):
        @wraps(func)
        def inner(*args, **kwargs):
            with TemporaryDirectory(prefix=prefix, suffix=suffix) as dirpath:
                return func(*(args + (dirpath,)), **kwargs)
        return inner
    return decorator
def test_zipfile_timestamp():
    # Verifies issue #143: the SOURCE_DATE_EPOCH environment variable
    # controls the timestamps written into the zip. TemporaryDirectory
    # is not a context manager under Python 3, so a helper wraps it.
    with temporary_directory() as tempdir:
        names = ('one', 'two', 'three')
        for filename in names:
            with codecs.open(os.path.join(tempdir, filename), 'w',
                             encoding='utf-8') as fp:
                fp.write(filename + '\n')
        zip_base_name = os.path.join(tempdir, 'dummy')
        # Epoch 315576060 corresponds to 1980-01-01, the earliest
        # date a TarInfo can represent.
        with environ('SOURCE_DATE_EPOCH', '315576060'):
            zip_filename = wheel.archive.make_wheelfile_inner(
                zip_base_name, tempdir)
        with readable_zipfile(zip_filename) as zf:
            dates = [info.date_time[:3] for info in zf.infolist()]
        assert all(date == (1980, 1, 1) for date in dates)
def test_zipfile_timestamp():
    # Regression test for issue #143: timestamps on entries inside the
    # built zip must honour SOURCE_DATE_EPOCH. TemporaryDirectory is not
    # a context manager under Python 3, hence the temporary_directory()
    # helper.
    with temporary_directory() as tempdir:
        for filename in 'one', 'two', 'three':
            path = os.path.join(tempdir, filename)
            fp = codecs.open(path, 'w', encoding='utf-8')
            try:
                fp.write(filename + '\n')
            finally:
                fp.close()
        zip_base_name = os.path.join(tempdir, 'dummy')
        # 1980-01-01 — the earliest date representable in TarInfos.
        with environ('SOURCE_DATE_EPOCH', '315576060'):
            zip_filename = wheel.archive.make_wheelfile_inner(
                zip_base_name, tempdir)
        with readable_zipfile(zip_filename) as zf:
            for entry in zf.infolist():
                assert entry.date_time[:3] == (1980, 1, 1)
def test_build_error(experiment_class):
    """A RuntimeError raised while building matrices should propagate
    out of the experiment call."""
    with testing.postgresql.Postgresql() as postgresql:
        engine = create_engine(postgresql.url())
        ensure_db(engine)
        with TemporaryDirectory() as temp_dir:
            experiment = experiment_class(
                config=sample_config(),
                db_engine=engine,
                model_storage_class=FSModelStorageEngine,
                project_path=os.path.join(temp_dir, 'inspections'),
            )
            patcher = mock.patch.object(experiment, 'build_matrices')
            with patcher as build_mock:
                build_mock.side_effect = RuntimeError('boom!')
                with pytest.raises(RuntimeError):
                    experiment()
def test_build_error_cleanup_timeout(_clean_up_mock, experiment_class):
    """When cleanup exceeds its timeout after a build failure, a
    TimeoutError is raised and the original error is chained on it."""
    with testing.postgresql.Postgresql() as postgresql:
        engine = create_engine(postgresql.url())
        ensure_db(engine)
        with TemporaryDirectory() as temp_dir:
            experiment = experiment_class(
                config=sample_config(),
                db_engine=engine,
                model_storage_class=FSModelStorageEngine,
                project_path=os.path.join(temp_dir, 'inspections'),
                cleanup_timeout=0.02,  # deliberately short timeout
            )
            patcher = mock.patch.object(experiment, 'build_matrices')
            with patcher as build_mock:
                build_mock.side_effect = RuntimeError('boom!')
                with pytest.raises(TimeoutError) as exc_info:
                    experiment()
                # The TimeoutError is what surfaces, but the build
                # failure is preserved in __context__ and would show in
                # a standard traceback.
                assert exc_info.value.__context__ is build_mock.side_effect
def get_function_root(self, name):
    """Extract (and cache) the packaged code of function *name*.

    Returns the directory the function's zip was extracted into.  The
    shared extraction directory is created lazily on first use, and a
    function already extracted is returned without re-extracting.

    Raises SystemExit(2) when the serverless package contains no zip
    for the function, or the zip is corrupt.
    """
    if not hasattr(self, 'functions_output'):
        # One TemporaryDirectory shared by all functions; kept on self
        # so it isn't deleted while in use.
        self.functions_output = TemporaryDirectory("puresec-serverless-functions-")
    package_name = self._get_function_package_name(name)
    function_root = os.path.join(self.functions_output.name, package_name)
    if os.path.exists(function_root):
        return function_root  # already extracted on a previous call
    try:
        # Named `archive` (not `zipfile`) to avoid shadowing the stdlib
        # zipfile module.
        archive = ZipFile(os.path.join(self.serverless_package, "{}.zip".format(package_name)), 'r')
    except FileNotFoundError:
        eprint("error: serverless package did not create a function zip for '{}'", name)
        raise SystemExit(2)
    except BadZipFile:
        eprint("error: serverless package did not create a valid function zip for '{}'", name)
        raise SystemExit(2)
    with archive:
        archive.extractall(function_root)
    return function_root