def sync_helpers(include, src, dest, options=None):
    if not os.path.isdir(dest):
        os.makedirs(dest)
    global_options = parse_sync_options(options)
    for inc in include:
        if isinstance(inc, str):
            inc, opts = extract_options(inc, global_options)
            sync(src, dest, inc, opts)
        elif isinstance(inc, dict):
            # could also do nested dicts here.
            for k, v in six.iteritems(inc):
                if isinstance(v, list):
                    for m in v:
                        inc, opts = extract_options(m, global_options)
                        sync(src, dest, '%s.%s' % (k, inc), opts)
def mkdir(path, owner='root', group='root', perms=0o555, force=False):
    """Create a directory"""
    log("Making dir {} {}:{} {:o}".format(path, owner, group, perms))
    uid = pwd.getpwnam(owner).pw_uid
    gid = grp.getgrnam(group).gr_gid
    realpath = os.path.abspath(path)
    path_exists = os.path.exists(realpath)
    if path_exists and force:
        if not os.path.isdir(realpath):
            log("Removing non-directory file {} prior to mkdir()".format(path))
            os.unlink(realpath)
            os.makedirs(realpath, perms)
    elif not path_exists:
        os.makedirs(realpath, perms)
    os.chown(realpath, uid, gid)
    os.chmod(realpath, perms)
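The mode passed to os.makedirs() is masked by the process umask, which is why mkdir() above finishes with an explicit os.chmod(). A minimal self-contained sketch of that effect (paths are temporary, values are illustrative):

import os
import stat
import tempfile

demo = os.path.join(tempfile.mkdtemp(), 'demo')
old_umask = os.umask(0o022)                       # a typical default umask
os.makedirs(demo, 0o777)                          # request 0o777 ...
print(oct(stat.S_IMODE(os.stat(demo).st_mode)))   # ... usually yields 0o755
os.umask(old_umask)                               # restore the previous umask
os.chmod(demo, 0o777)                             # chmod applies the mode unconditionally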
def create_directory_if_not_exists(logger, path):
    """
    Creates 'path' if it does not exist

    If creation fails, an exception will be thrown

    :param logger:  the logger
    :param path:    the path to ensure it exists
    """
    try:
        os.makedirs(path)
    except OSError as ex:
        if ex.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            log_critical_error(logger, ex, 'An error happened trying to create ' + path)
            raise
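On Python 3.2 and later, the EEXIST handling above can be collapsed into a single call; a minimal sketch (the helper name is hypothetical, not from the original source):

import os

def ensure_directory(path):
    # exist_ok=True suppresses the error if the directory already exists;
    # any other OSError (e.g. permission denied) still propagates.
    os.makedirs(path, exist_ok=True)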
def save_exported_media_to_file(logger, export_dir, media_file, filename, extension):
    """
    Write exported media item to disk at specified location with specified file name.
    Any existing file with the same name will be overwritten.

    :param logger:      the logger
    :param export_dir:  path to directory for exports
    :param media_file:  media file to write to disc
    :param filename:    filename to give exported image
    :param extension:   extension to give exported image
    """
    if not os.path.exists(export_dir):
        logger.info("Creating directory at {0} for media files.".format(export_dir))
        os.makedirs(export_dir)
    file_path = os.path.join(export_dir, filename + '.' + extension)
    if os.path.isfile(file_path):
        logger.info('Overwriting existing report at ' + file_path)
    try:
        with open(file_path, 'wb') as out_file:
            shutil.copyfileobj(media_file.raw, out_file)
        del media_file
    except Exception as ex:
        log_critical_error(logger, ex, 'Exception while writing ' + file_path + ' to file')
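The media_file.raw attribute accessed above matches a requests.Response fetched with stream=True; a hedged usage sketch under that assumption (the URL and paths are placeholders):

import logging
import requests

logger = logging.getLogger(__name__)
# stream=True keeps the raw socket file object usable for copyfileobj()
resp = requests.get('https://example.com/media/photo.jpg', stream=True)
save_exported_media_to_file(logger, '/tmp/exports', resp, 'photo', 'jpg')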
def main():
    args = get_args()
    if not os.path.exists(args.outdir):
        os.makedirs(args.outdir)
    sources = [args.fastq, args.bam, args.summary]
    sourcename = ["fastq", "bam", "summary"]
    datadf = nanoget.get_input(
        source=[n for n, s in zip(sourcename, sources) if s][0],
        files=[f for f in sources if f][0],
        threads=args.threads,
        readtype=args.readtype,
        combine="track")
    if args.name:
        output = args.name
    else:
        output = os.path.join(args.outdir, args.prefix + "NanoStats.txt")
    write_stats([datadf], output)
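The source/files selection in main() picks the first input type the user actually supplied; a minimal self-contained demonstration of that idiom:

sources = [None, "reads.bam", None]           # fastq, bam, summary (only bam given)
sourcename = ["fastq", "bam", "summary"]
print([n for n, s in zip(sourcename, sources) if s][0])  # -> bam
print([f for f in sources if f][0])                      # -> reads.bam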
def create_directory_if_not_exists(self, path):
    """
    Creates 'path' if it does not exist

    If creation fails, an exception will be thrown

    :param path: the path to ensure it exists
    """
    try:
        os.makedirs(path)
    except OSError as ex:
        if ex.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            self.log_critical_error(ex, 'An error happened trying to create ' + path)
            raise
def downloadExamples(examples, connType, conn=False):
    print("===========")
    print("DOWNLOADING")
    print("===========")
    try:
        os.makedirs(kResultStorageFolder)
    except OSError:
        if not os.path.isdir(kResultStorageFolder):
            raise
    leaveconn = True
    if not conn:
        leaveconn = False
        conn = getDB(connType)
    for example in examples:
        print("---------\nEXAMPLE: {}\n-------".format(example))
        example, room = example.rsplit('_', 1)
        getExampleFromDB(example, connType, conn)
    if not leaveconn:
        conn.close()
def workthread(item, user_agent, path):
    strurl = 'http://yxpjw.club' + item[0]
    picname = item[1]
    print('Downloading %s...\n' % picname)
    req = request.Request(strurl)
    req.add_header('User-Agent', user_agent)
    response = request.urlopen(req)
    content = response.read().decode('gbk')
    strurl2 = re.search(r'^(.*)/', strurl).group(0)
    print('base url...............%s' % strurl2)
    # destname = os.path.join(path, picname + '.txt')
    # with open(destname, 'w', encoding='gbk') as file:
    #     file.write(content)
    destdir = os.path.join(path, picname)
    os.makedirs(destdir)
    page = 1
    while True:
        content = getpagedata(content, destdir, page, strurl2)
        if not content:
            break
        page = page + 1
    print('%s finished\n' % picname)
def save_to_path(self, filepath):
    """Save retrieved data to file at ``filepath``.

    .. versionadded:: 1.9.6

    :param filepath: Path to save retrieved data.
    """
    filepath = os.path.abspath(filepath)
    dirname = os.path.dirname(filepath)
    if not os.path.exists(dirname):
        os.makedirs(dirname)
    self.stream = True
    with open(filepath, 'wb') as fileobj:
        for data in self.iter_content():
            fileobj.write(data)
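save_to_path is a method of a response-like object that exposes iter_content() and a stream flag; a hedged usage sketch under that assumption (the web.get helper and the URL are assumptions, not from this snippet):

r = web.get('https://example.com/data.zip')   # hypothetical HTTP helper returning such a response
r.save_to_path('/tmp/downloads/data.zip')     # missing parent directories are created on demand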
def get(self, resource):
    """
    Get a resource into the cache.

    :param resource: A :class:`Resource` instance.
    :return: The pathname of the resource in the cache.
    """
    prefix, path = resource.finder.get_cache_info(resource)
    if prefix is None:
        result = path
    else:
        result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
        dirname = os.path.dirname(result)
        if not os.path.isdir(dirname):
            os.makedirs(dirname)
        if not os.path.exists(result):
            stale = True
        else:
            stale = self.is_stale(resource, path)
        if stale:
            # write the bytes of the resource to the cache location
            with open(result, 'wb') as f:
                f.write(resource.bytes)
    return result
def delete_account(self, account):
    """
    Deletes the given `account` from the `keystore_dir` directory.
    Then deletes it from the `AccountsService` account manager instance.
    In fact, moves it to another location; another directory at the same
    level.
    """
    app = self.app
    keystore_dir = app.services.accounts.keystore_dir
    deleted_keystore_dir = PyWalib.deleted_account_dir(keystore_dir)
    # create the deleted account dir if required
    if not os.path.exists(deleted_keystore_dir):
        os.makedirs(deleted_keystore_dir)
    # "removes" it from the file system
    account_filename = os.path.basename(account.path)
    deleted_account_path = os.path.join(
        deleted_keystore_dir, account_filename)
    shutil.move(account.path, deleted_account_path)
    # deletes it from the `AccountsService` account manager instance
    account_service = self.get_account_list()
    account_service.accounts.remove(account)
def test_delete_account_already_exists(self):
    """
    If the destination (backup/trash) directory where the account is moved
    already exists, it should be handled gracefully.
    This could happen if the account gets deleted, then reimported and
    deleted again, refs:
    https://github.com/AndreMiras/PyWallet/issues/88
    """
    pywalib = self.pywalib
    account = self.helper_new_account()
    # creates a file in the backup/trash folder that would conflict
    # with the deleted account
    deleted_keystore_dir = PyWalib.deleted_account_dir(self.keystore_dir)
    os.makedirs(deleted_keystore_dir)
    account_filename = os.path.basename(account.path)
    deleted_account_path = os.path.join(
        deleted_keystore_dir, account_filename)
    # create that file
    open(deleted_account_path, 'a').close()
    # then deletes the account and verifies it worked
    self.assertEqual(len(pywalib.get_account_list()), 1)
    pywalib.delete_account(account)
    self.assertEqual(len(pywalib.get_account_list()), 0)
def generate_dataset_file(num_rows, num_cols, num_cats=4, rate=1.0):
    """Generate a random dataset.

    Returns:
        The path to a gzipped pickled data table.
    """
    path = os.path.join(DATA, '{}-{}-{}-{:0.1f}.dataset.pkz'.format(
        num_rows, num_cols, num_cats, rate))
    if os.path.exists(path):
        return path
    print('Generating {}'.format(path))
    if not os.path.exists(DATA):
        os.makedirs(DATA)
    dataset = generate_dataset(num_rows, num_cols, num_cats, rate)
    pickle_dump(dataset, path)
    return path
def generate_model_file(num_rows, num_cols, num_cats=4, rate=1.0):
    """Generate a random model.

    Returns:
        The path to a gzipped pickled model.
    """
    path = os.path.join(DATA, '{}-{}-{}-{:0.1f}.model.pkz'.format(
        num_rows, num_cols, num_cats, rate))
    V = num_cols
    K = V * (V - 1) // 2
    if os.path.exists(path):
        return path
    print('Generating {}'.format(path))
    if not os.path.exists(DATA):
        os.makedirs(DATA)
    dataset_path = generate_dataset_file(num_rows, num_cols, num_cats, rate)
    dataset = pickle_load(dataset_path)
    table = dataset['table']
    tree_prior = np.zeros(K, dtype=np.float32)
    config = make_config(learning_init_epochs=5)
    model = train_model(table, tree_prior, config)
    pickle_dump(model, path)
    return path
def create_default_project(cls, path, name='DefaultProject'):
    """Creates the default Maya project structure along with a suitable
    workspace.mel file.

    :param str path: The path where the default project structure will be
        created.
    :return:
    """
    project_path = os.path.join(path, name)
    # let's create the structure
    for dir_name in cls.default_project_structure.split('\n'):
        dir_path = os.path.join(project_path, dir_name)
        try:
            os.makedirs(dir_path)
        except OSError:
            pass
    # create the workspace.mel
    workspace_mel_path = os.path.join(project_path, 'workspace.mel')
    with open(workspace_mel_path, 'w+') as f:
        f.writelines(cls.default_workspace_content)
    return project_path
def get_config_file_path(cls):
    if not cls.IS_GLOBAL:
        # local to this directory
        base_path = os.path.join('.')
    else:
        base_path = os.path.expanduser('~')
        if not os.access(base_path, os.W_OK):
            base_path = '/tmp'
        base_path = os.path.join(base_path, '.polyaxon')
        if not os.path.exists(base_path):
            try:
                os.makedirs(base_path)
            except OSError:
                # Expect permission denied and potential race conditions
                # in multi-threaded environments.
                logger.error('Could not create config directory `{}`'.format(base_path))
    return os.path.join(base_path, cls.CONFIG_FILE_NAME)
def trash_old_stuff(trashlist, trashpath, newpath):
    if isinstance(trashlist, list):
        for old_location in trashlist:
            # Get the subfolders needed to be created
            path_within_destination = os.path.relpath(old_location, trashpath)
            # Create what will be the destination path
            new_location = os.path.join(newpath, path_within_destination)
            # Make sure all the relevant subfolders exist in the destination
            if not os.path.exists(os.path.dirname(new_location)):
                os.makedirs(os.path.dirname(new_location))
            # Even though we've been double-checking paths all along,
            # let's just make one last check
            if os.path.exists(old_location) and os.path.isdir(newpath):
                os.rename(old_location, new_location)
                logging.info("Moving %s to %s\n" % (old_location, new_location))
            else:
                logging.error("One of %s or %s does not exist\n" % (old_location, new_location))
    else:
        logging.error("%s is not a valid list\n" % trashlist)
def download_sifts_from_ebi(identifier, override=False):
    """
    Downloads a SIFTS xml from the EBI FTP to the filesystem.

    :param identifier: (str) PDB ID
    :param override: (boolean)
    :return: (side effects)
    """
    filename = "{}.xml.gz".format(identifier)
    outputfile = os.path.join(config.db_root, config.db_sifts, filename)
    os.makedirs(os.path.join(config.db_root, config.db_sifts), exist_ok=True)
    url_root = config.ftp_sifts
    url_endpoint = "{}.xml.gz".format(identifier)
    url = url_root + url_endpoint
    Downloader(url=url, outputfile=outputfile,
               decompress=True, override=override)
    return
def download_data_from_uniprot(identifier, file_format="fasta", override=False):
    """
    Downloads a UniProt fasta, gff or txt to the filesystem.

    :param identifier: (str) UniProt ID
    :param file_format: (str) endpoint
    :param override: (boolean)
    :return: (side effects)
    """
    file_format = file_format.lstrip('.')
    if file_format in ['txt', 'fasta', 'gff']:
        filename = "{}.{}".format(identifier, file_format)
        outputfile = os.path.join(config.db_root, config.db_uniprot, filename)
        os.makedirs(os.path.join(config.db_root, config.db_uniprot), exist_ok=True)
        url_root = config.http_uniprot
        url_endpoint = "{}.{}".format(identifier, file_format)
        url = url_root + url_endpoint
        Downloader(url=url, outputfile=outputfile,
                   decompress=True, override=override)
    else:
        raise ValueError("File format {} is not currently implemented..."
                         "".format(file_format))
    return
def download_alignment_from_pfam(identifier, alignment_size="seed",
                                 override=False):
    """
    Downloads a MSA in Stockholm format from Pfam to the filesystem.

    :param identifier: (str) Pfam ID
    :param alignment_size: (str) either "seed" or "full"
    :param override: (boolean)
    :return: (side effects)
    """
    filename = "{}.sth".format(identifier)
    outputfile = os.path.join(config.db_root, config.db_pfam, filename)
    os.makedirs(os.path.join(config.db_root, config.db_pfam), exist_ok=True)
    url_root = config.http_pfam
    url_endpoint = ("family/{}/alignment/{}"
                    "".format(identifier, alignment_size))
    url = url_root + url_endpoint
    Downloader(url=url, outputfile=outputfile,
               decompress=True, override=override)
    return
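The three download helpers above share the same shape: ensure the target directory with os.makedirs(..., exist_ok=True), build the URL, and delegate to Downloader. A hedged refactoring sketch of that shared logic (the helper name is hypothetical, not from the original source):

def _download_to_db(subdir, filename, url_endpoint, url_root, override=False):
    # Ensure the destination directory exists (idempotent on Python >= 3.2),
    # then hand the actual transfer off to the project's Downloader class.
    target_dir = os.path.join(config.db_root, subdir)
    os.makedirs(target_dir, exist_ok=True)
    outputfile = os.path.join(target_dir, filename)
    Downloader(url=url_root + url_endpoint, outputfile=outputfile,
               decompress=True, override=override)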
def predict_episodes(self, model, episode_paths, n=None, out_dir=None, prefix="model/"):
    if n is not None:
        episode_paths = np.random.choice(episode_paths, n, replace=False)
    if out_dir is not None:
        os.makedirs(out_dir, exist_ok=True)
    for ep, episode_path in enumerate(episode_paths):
        episode = frame.load_episode(episode_path)
        features = self.load_features_episode(episode)
        prediction = model.predict_proba(features)
        for i in range(len(prediction)):
            episode.frames[i].info[prefix + "score"] = prediction[i]
            episode.frames[i].info[prefix + "label"] = model.apply_threshold(prediction[i])
        out_path = episode_path
        if out_dir is not None:
            out_path = os.path.join(out_dir, "{}.pkl.gz".format(ep))
        frame.save_episode(out_path, episode)
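A note on the sampling step: np.random.choice(..., replace=False) raises ValueError when n exceeds the number of available paths. A minimal self-contained demonstration of the behavior relied on above:

import numpy as np

paths = np.array(['a.pkl.gz', 'b.pkl.gz', 'c.pkl.gz'])
print(np.random.choice(paths, 2, replace=False))   # two distinct paths
# np.random.choice(paths, 5, replace=False) would raise ValueError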
def convert_episode_to_tf_records(base_directory, new_directory, dataloader, path):
    episode = frame.load_episode(path)
    features, labels = dataloader.load_features_and_labels_episode(episode)
    assert path.rfind(base_directory) > -1
    new_path = path[path.rfind(base_directory) + len(base_directory) + 1:]
    new_path = os.path.splitext(new_path)[0]
    new_path = os.path.splitext(new_path)[0]
    new_path = os.path.join(new_directory, new_path + ".tfrecord")
    options = tf.python_io.TFRecordOptions(
        compression_type=tf.python_io.TFRecordCompressionType.GZIP)
    os.makedirs(new_path, exist_ok=True)
    for i, f in enumerate(episode.frames):
        writer = tf.python_io.TFRecordWriter(
            os.path.join(new_path, "{}.tfrecord".format(i)), options=options)
        example = tf.train.Example(features=tf.train.Features(feature={
            'action': _int64_feature([f.action]),
            'label': _int64_feature([f.label] if f.label is not None else []),
            'observation': _float_feature(f.observation.reshape(-1)),
            'observation_shape': _int64_feature(f.observation.shape),
            'image': _bytes_feature([f.image.tobytes()]),
            'image_shape': _int64_feature(f.image.shape),
        }))
        writer.write(example.SerializeToString())
        writer.close()
    return new_path
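convert_episode_to_tf_records() produces a directory named *.tfrecord holding one gzipped record file per frame. A hedged read-back sketch using the same TF 1.x tf.python_io API (the function name is hypothetical):

import os
import tensorflow as tf

def read_episode_records(record_dir):
    # Iterate the per-frame files produced above and parse each record.
    options = tf.python_io.TFRecordOptions(
        compression_type=tf.python_io.TFRecordCompressionType.GZIP)
    for name in sorted(os.listdir(record_dir)):
        for record in tf.python_io.tf_record_iterator(
                os.path.join(record_dir, name), options=options):
            yield tf.train.Example.FromString(record)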
def main():
    """Main function"""
    if ARGS.type == "copy":
        generate_fn = make_copy
    elif ARGS.type == "reverse":
        generate_fn = make_reverse
    # Generate dataset
    examples = list(generate_fn(ARGS.num_examples, ARGS.min_len, ARGS.max_len))
    try:
        os.makedirs(ARGS.output_dir)
    except OSError:
        if not os.path.isdir(ARGS.output_dir):
            raise
    # Write train data
    train_sources, train_targets = zip(*examples)
    write_parallel_text(train_sources, train_targets, ARGS.output_dir)