def add_security_data(adds, deletes):
if not hasattr(security_list, 'using_copy'):
raise Exception('add_security_data must be used within '
'security_list_copy context')
directory = os.path.join(
security_list.SECURITY_LISTS_DIR,
"leveraged_etf_list/20150127/20150125"
)
if not os.path.exists(directory):
os.makedirs(directory)
del_path = os.path.join(directory, "delete")
with open(del_path, 'w') as f:
for sym in deletes:
f.write(sym)
f.write('\n')
add_path = os.path.join(directory, "add")
with open(add_path, 'w') as f:
for sym in adds:
f.write(sym)
f.write('\n')
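# A hypothetical read-side helper (not part of the original zipline code): the
# "add" and "delete" files written above hold one symbol per line, so reading
# them back only requires stripping blank lines.
def read_security_list_file(path):
    with open(path) as f:
        return [line.strip() for line in f if line.strip()]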
def write_state_to_disk(cls, state, emission_rate=None):
    state_dir = cls.__module__ + '.' + cls.__name__
    full_dir = base_state_dir + '/' + state_dir
    if not os.path.exists(full_dir):
        os.makedirs(full_dir)
    if emission_rate is not None:
        name = 'State_Version_' + emission_rate + \
            str(state['obj_state'][VERSION_LABEL])
    else:
        name = 'State_Version_' + str(state['obj_state'][VERSION_LABEL])
    full_path = full_dir + '/' + name
    # pickle needs a binary file object on Python 3; the context manager also
    # guarantees the handle is closed on error.
    with open(full_path, 'wb') as f:
        pickle.dump(state, f)
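# Hypothetical counterpart to write_state_to_disk (not part of the original
# snippet): load a saved state back, assuming the same base_state_dir layout,
# binary pickle files, and the module-level os/pickle imports used above.
def read_state_from_disk(cls, name):
    state_dir = cls.__module__ + '.' + cls.__name__
    full_path = base_state_dir + '/' + state_dir + '/' + name
    with open(full_path, 'rb') as f:
        return pickle.load(f)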
def write_sts_token(self, profile, access_key_id, secret_access_key, session_token):
""" Writes STS auth information to credentials file """
region = 'us-east-1'
output = 'json'
if not os.path.exists(self.creds_dir):
os.makedirs(self.creds_dir)
config = RawConfigParser()
if os.path.isfile(self.creds_file):
config.read(self.creds_file)
if not config.has_section(profile):
config.add_section(profile)
config.set(profile, 'output', output)
config.set(profile, 'region', region)
config.set(profile, 'aws_access_key_id', access_key_id)
config.set(profile, 'aws_secret_access_key', secret_access_key)
config.set(profile, 'aws_session_token', session_token)
with open(self.creds_file, 'w+') as configfile:
config.write(configfile)
print("Temporary credentials written to profile: %s" % profile)
print("Invoke using: aws --profile %s <service> <command>" % profile)
def write_stage_alerts(stage, path, alerts_file="alerts.list"):
    alerts = load_alerts()
    out_file = os.path.join(path, alerts_file)
    if not os.path.exists(path):
        os.makedirs(path)
    keys = ["metric", "threshold", "compare", "action", "message"]
    # dict.has_key() was removed in Python 3; the "in" test works on both.
    if stage not in alerts:
        martian.throw("No alerts found for stage %s" % stage)
    with open(out_file, "w") as out_handle:
        for alert in alerts[stage]:
            out_handle.write("#\n")
            out_handle.write(alert["metric"] + "\n")
            out_handle.write(str(alert["threshold"]) + "\n")
            out_handle.write(alert["compare"] + "\n")
            out_handle.write(alert["action"] + "\n")
            out_handle.write(alert["message"] + "\n")
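# Hypothetical read-side counterpart (not part of the original pipeline):
# every record written above is a "#" marker line followed by the five fields
# in the order of `keys`.
def read_stage_alerts(out_file):
    keys = ["metric", "threshold", "compare", "action", "message"]
    alerts = []
    with open(out_file) as f:
        lines = [line.rstrip("\n") for line in f]
    for i in range(0, len(lines), 6):
        alerts.append(dict(zip(keys, lines[i + 1:i + 6])))
    return alerts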
def init_env(tree=['default']):
    dump_loc = '/var/cache/acbs/tarballs/'
    tmp_loc = '/var/cache/acbs/build/'
    print("----- Welcome to ACBS - %s -----" % (acbs_version))
    try:
        if not os.path.isdir(dump_loc):
            os.makedirs(dump_loc)
        if not os.path.isdir(tmp_loc):
            os.makedirs(tmp_loc)
    except OSError:
        # catch only filesystem errors; a bare "except:" would also swallow
        # KeyboardInterrupt and SystemExit
        raise IOError('\033[93mFailed to make work directory\033[0m!')
    if os.path.exists('/etc/acbs_forest.conf'):
        tree_loc = parse_acbs_conf(tree[0])
        if tree_loc is not None:
            os.chdir(tree_loc)
        else:
            sys.exit(1)
    else:
        if not write_acbs_conf():
            sys.exit(1)
    return
def __init__(self):
    self.config_dir = os.path.expanduser("~/.config/ytbrowser/")
    self.defaults['format'] = "mkv"
    self.defaults['quality'] = "bestvideo"
    self.defaults['preferredcodec'] = "mp3"
    self.defaults['preferredquality'] = 192
    self.defaults['developerKey'] = "AIzaSyDFuK00HWV0fd1VMb17R8GghRVf_iQx9uk"
    self.defaults['apiServiceName'] = "youtube"
    self.defaults['apiVersion'] = "v3"
    if not os.path.exists(self.config_dir):
        os.makedirs(self.config_dir)
    if not os.path.exists(self.config_dir + "config.yml"):
        open(self.config_dir + "config.yml", "a").close()
    with open(self.config_dir + "config.yml", 'r') as ymlfile:
        # safe_load avoids constructing arbitrary Python objects from YAML
        self.user_settings = yaml.safe_load(ymlfile)
    if self.user_settings is None:
        self.user_settings = {}
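# Hypothetical helper (not part of the original class): overlay the values
# read from config.yml on the defaults so callers can consult a single dict.
def effective_settings(config):
    merged = dict(config.defaults)
    merged.update(config.user_settings)
    return merged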
def dest_path(self, path):
path = os.path.normpath(path)
if not path.startswith(self.__dest_path_prefix):
path = os.path.join(self.__dest_path_prefix,
os.path.splitdrive(path)[1].lstrip(os.sep))
try:
os.makedirs(path)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
self.__dest_path = path
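# Note: on Python 3.2+ the errno.EEXIST handling above can be written as a
# one-liner; exist_ok=True makes os.makedirs a no-op when the directory
# already exists (assumes the module-level "import os" used by the snippets).
def ensure_dir(path):
    os.makedirs(path, exist_ok=True)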
def get_dataset(dataset_path='Data/Train_Data'):
    # Getting all data from data path:
    try:
        X = np.load('Data/npy_train_data/X.npy')
        Y = np.load('Data/npy_train_data/Y.npy')
    except (IOError, OSError):  # cached arrays missing; rebuild from the images
        inputs_path = dataset_path + '/input'
        images = listdir(inputs_path)  # Getting images
        X = []
        Y = []
        for img in images:
            img_path = inputs_path + '/' + img
            x_img = get_img(img_path).astype('float32').reshape(64, 64, 3)
            x_img /= 255.
            y_img = get_img(img_path.replace('input/', 'mask/mask_')).astype('float32').reshape(64, 64, 1)
            y_img /= 255.
            X.append(x_img)
            Y.append(y_img)
        X = np.array(X)
        Y = np.array(Y)
        # Create dataset:
        if not os.path.exists('Data/npy_train_data/'):
            os.makedirs('Data/npy_train_data/')
        np.save('Data/npy_train_data/X.npy', X)
        np.save('Data/npy_train_data/Y.npy', Y)
    X, X_test, Y, Y_test = train_test_split(X, Y, test_size=0.1, random_state=42)
    return X, X_test, Y, Y_test
def train_model(model, X, X_test, Y, Y_test):
if not os.path.exists('Data/Checkpoints/'):
os.makedirs('Data/Checkpoints/')
checkpoints = []
checkpoints.append(ModelCheckpoint('Data/Checkpoints/best_weights.h5', monitor='val_loss', verbose=0, save_best_only=True, save_weights_only=True, mode='auto', period=1))
checkpoints.append(TensorBoard(log_dir='Data/Checkpoints/./logs', histogram_freq=0, write_graph=True, write_images=False, embeddings_freq=0, embeddings_layer_names=None, embeddings_metadata=None))
model.fit(X, Y, batch_size=batch_size, epochs=epochs, validation_data=(X_test, Y_test), shuffle=True, callbacks=checkpoints)
return model
def save_model(model):
if not os.path.exists('Data/Model/'):
os.makedirs('Data/Model/')
model_json = model.to_json()
with open("Data/Model/model.json", "w") as model_file:
model_file.write(model_json)
# serialize weights to HDF5
model.save_weights("Data/Model/weights.h5")
print('Model and weights saved')
return
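# Usage sketch tying get_dataset, train_model and save_model together;
# build_model is a hypothetical helper (e.g. one returning a compiled Keras
# model) and is not part of the original snippets.
def run_training(build_model):
    X, X_test, Y, Y_test = get_dataset()
    model = build_model()
    model = train_model(model, X, X_test, Y, Y_test)
    save_model(model)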
def sync_pyfile(src, dest):
src = src + '.py'
src_dir = os.path.dirname(src)
logging.info('Syncing pyfile: %s -> %s.' % (src, dest))
if not os.path.exists(dest):
os.makedirs(dest)
shutil.copy(src, dest)
if os.path.isfile(os.path.join(src_dir, '__init__.py')):
shutil.copy(os.path.join(src_dir, '__init__.py'),
dest)
ensure_init(dest)
def __init__(self, basename, input_dir, verbose=False, replace_missing=True, filter_features=False):
'''Constructor'''
self.use_pickle = False # Turn this to true to save data as pickle (inefficient)
self.basename = basename
if basename in input_dir:
self.input_dir = input_dir
else:
self.input_dir = input_dir + "/" + basename + "/"
if self.use_pickle:
if os.path.exists ("tmp"):
self.tmp_dir = "tmp"
elif os.path.exists ("../tmp"):
self.tmp_dir = "../tmp"
else:
os.makedirs("tmp")
self.tmp_dir = "tmp"
info_file = os.path.join (self.input_dir, basename + '_public.info')
self.info = {}
self.getInfo (info_file)
self.feat_type = self.loadType (os.path.join(self.input_dir, basename + '_feat.type'), verbose=verbose)
self.data = {}
Xtr = self.loadData (os.path.join(self.input_dir, basename + '_train.data'), verbose=verbose, replace_missing=replace_missing)
Ytr = self.loadLabel (os.path.join(self.input_dir, basename + '_train.solution'), verbose=verbose)
Xva = self.loadData (os.path.join(self.input_dir, basename + '_valid.data'), verbose=verbose, replace_missing=replace_missing)
Xte = self.loadData (os.path.join(self.input_dir, basename + '_test.data'), verbose=verbose, replace_missing=replace_missing)
        # Normally, feature selection should be done as part of a pipeline.
        # However, here we do it as a preprocessing step for efficiency reasons.
        idx = []
        if filter_features:  # ad hoc feature selection, for the example...
fn = min(Xtr.shape[1], 1000)
idx = data_converter.tp_filter(Xtr, Ytr, feat_num=fn, verbose=verbose)
Xtr = Xtr[:,idx]
Xva = Xva[:,idx]
Xte = Xte[:,idx]
self.feat_idx = np.array(idx).ravel()
self.data['X_train'] = Xtr
self.data['Y_train'] = Ytr
self.data['X_valid'] = Xva
self.data['X_test'] = Xte
def mkdir(d):
if not os.path.exists(d):
os.makedirs(d)
def mkdir(d):
''' Create a new directory'''
if not os.path.exists(d):
os.makedirs(d)
def _create_home(self):
if not os.path.isdir(self._HOME + '/' + self._CONFIG_DIR):
os.makedirs(self._HOME + '/' + self._CONFIG_DIR)
with os.fdopen(os.open(self._HOME + '/' + self._CONFIG_DIR + '/' + self._CONFIG_FILE_NAME,
os.O_WRONLY | os.O_CREAT, 0o600), 'w'):
pass
with os.fdopen(os.open(self._HOME + '/' + self._CONFIG_DIR + '/' + self._CREDENTIALS_FILE_NAME,
os.O_WRONLY | os.O_CREAT, 0o600), 'w'):
pass
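# For comparison, a minimal standalone helper using the same pattern as
# _create_home: passing 0o600 to os.open applies the permissions at creation
# time, so the file never exists with a looser mode (subject to the umask).
def touch_private(path):
    with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, 0o600), 'w'):
        pass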
def gen_makeself(conf_dir,alias):
mkself_tmp = os.path.join(conf_dir,'tmp')
conf_mkself = os.path.join(conf_dir,'Installers')
if not os.path.exists(conf_mkself):
os.makedirs(conf_mkself)
if not os.path.exists(mkself_tmp):
os.makedirs(mkself_tmp)
if sys.platform.startswith('darwin'):
alias_app = os.path.join(conf_dir,'{}.app'.format(alias))
if os.path.exists(alias_app):
run_command('cp -R {} {}'.format(alias_app,mkself_tmp))
gen_osx_plist(alias,mkself_tmp)
gen_st_setup(alias,mkself_tmp)
mkself_installer = 'bash "{}" "{}" "{}/{}_Installer" "Stitch" bash st_setup.sh'.format(mkself_exe, mkself_tmp, conf_mkself,alias)
st_log.info(mkself_installer)
st_log.info(run_command(mkself_installer))
shutil.rmtree(mkself_tmp)
else:
binry_dir = os.path.join(conf_dir,'Binaries')
alias_dir = os.path.join(binry_dir, alias)
if os.path.exists(alias_dir):
run_command('cp -R {} {}'.format(alias_dir,mkself_tmp))
gen_lnx_daemon(alias,mkself_tmp)
gen_st_setup(alias,mkself_tmp)
mkself_installer = 'bash "{}" "{}" "{}/{}_Installer" "Stitch" bash st_setup.sh'.format(mkself_exe, mkself_tmp, conf_mkself,alias)
st_log.info(mkself_installer)
st_log.info(run_command(mkself_installer))
shutil.rmtree(mkself_tmp)
def save(self, path):
    if not os.path.exists(path): os.makedirs(path)
    self.src_vocab.save(path + "/vocab.src")
    self.tgt_vocab.save(path + "/vocab.tgt")
    self.m.save(path + "/params")
    # pickle requires a binary file object on Python 3
    with open(path + "/args", "wb") as f: pickle.dump(self.args, f)
def write_combined_file(lang_code, all_lang_paths, all_en_paths):
src_combined_filename = "train" + "_" + lang_code + "_en."+ lang_code + ".txt"
tgt_combined_filename = "train" + "_" + lang_code + "_en.en" + ".txt"
if not os.path.exists(output_dir + lang_code):
os.makedirs(output_dir+lang_code)
write_lang = []
write_en = []
for corp in all_lang_paths:
for filename in corp:
with open(filename) as f:
doc = f.read()
write_lang.append(doc)
for corp in all_en_paths:
for filename in corp:
with open(filename) as f:
doc = f.read()
write_en.append(doc)
for doc1, doc2 in zip(write_lang, write_en):
if len(doc1.split("\n"))!=len(doc2.split("\n")):
continue
else:
with open(output_dir + lang_code + "/" + src_combined_filename, 'a') as wf:
wf.write(doc1)
with open(output_dir + lang_code + "/" + tgt_combined_filename, 'a') as wf:
wf.write(doc2)
def new_dir(self, path):
    try:
        os.makedirs(path)
    except Exception:
        # a bare "raise" preserves the original traceback; "raise e" discards it on Python 2
        raise