def save_images(self, X, imgfile, density=False):
    ax = plt.axes()
    x = X[:, 0]
    y = X[:, 1]
    if density:
        # Color each point by a Gaussian KDE estimate of the local density
        xy = np.vstack([x, y])
        z = scipy.stats.gaussian_kde(xy)(xy)
        ax.scatter(x, y, c=z, marker='o', edgecolors='none')
    else:
        ax.scatter(x, y, marker='o', c=range(x.shape[0]),
                   cmap=plt.cm.coolwarm)
    if self.collection is not None:
        self.collection.set_transform(ax.transData)
        ax.add_collection(self.collection)
    ax.text(x[0], y[0], 'start', transform=ax.transAxes)
    ax.axis([-0.2, 1.2, -0.2, 1.2])
    fig = plt.gcf()
    plt.savefig(imgfile)
    plt.close()
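To try the density-colored branch above in isolation, here is a minimal, self-contained sketch with synthetic points standing in for X; the random data and the output filename density_scatter.png are assumptions for illustration only.

import numpy as np
import scipy.stats
import matplotlib.pyplot as plt

# Synthetic 2-D points roughly inside the unit square (placeholder for X)
rng = np.random.default_rng(0)
X = rng.normal(loc=0.5, scale=0.15, size=(500, 2))

x, y = X[:, 0], X[:, 1]
xy = np.vstack([x, y])
z = scipy.stats.gaussian_kde(xy)(xy)  # per-point density estimate

ax = plt.axes()
ax.scatter(x, y, c=z, marker='o', edgecolors='none')
ax.axis([-0.2, 1.2, -0.2, 1.2])
plt.savefig('density_scatter.png')  # hypothetical output path
plt.close()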
Python savefig() example source code
def plot_volcano(logFC, p_val, sample_name, saveName, logFC_thresh):
    fig = pl.figure()
    ## To plot and save
    pl.scatter(logFC[(p_val > 0.05) | (abs(logFC) < logFC_thresh)],
               -np.log10(p_val[(p_val > 0.05) | (abs(logFC) < logFC_thresh)]),
               color='blue', alpha=0.5)
    pl.scatter(logFC[(p_val < 0.05) & (abs(logFC) > logFC_thresh)],
               -np.log10(p_val[(p_val < 0.05) & (abs(logFC) > logFC_thresh)]),
               color='red')
    pl.hlines(-np.log10(0.05), min(logFC), max(logFC))
    pl.vlines(-logFC_thresh, min(-np.log10(p_val)), max(-np.log10(p_val)))
    pl.vlines(logFC_thresh, min(-np.log10(p_val)), max(-np.log10(p_val)))
    pl.xlim(-3, 3)
    pl.xlabel('Log Fold Change')
    pl.ylabel('-log10(p-value)')
    pl.savefig(saveName)
    pl.close(fig)
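A possible way to exercise plot_volcano with synthetic inputs, assuming the function above is defined in the current session with pl bound to matplotlib.pylab and np to numpy; the threshold and filename are placeholders.

import numpy as np
from matplotlib import pylab as pl  # name `pl` assumed by plot_volcano above

# Synthetic fold changes and p-values, just to exercise the plot
rng = np.random.default_rng(1)
logFC = rng.normal(0.0, 1.5, size=1000)
p_val = rng.uniform(1e-6, 1.0, size=1000)

plot_volcano(logFC, p_val, sample_name='demo',
             saveName='volcano_demo.png', logFC_thresh=1.0)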
# def plot_histograms(df_peaks,pntr_list):
#
# for pntr in pntr_list:
# colName =pntr[2]+'_Intragenic_position'
# pl.hist(df_peaks[colName])
# pl.xlabel(colName)
# pl.ylabel()
# pl.show()
def plot_prediction_MM(model, y_train, y_test, plot_title=''):
    T = y_test.shape[0]
    mx, vx, my, vy_noiseless, vy = model.predict_forward(T, prop_mode=PROP_MM)
    T_train = y_train.shape[0]
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.plot(np.arange(T_train), y_train[:, 0], 'k+-')
    ttest = np.arange(T_train, T_train + T)
    # pdb.set_trace()
    ax.plot(ttest, my[:, 0], '-', color='b')
    ax.fill_between(
        ttest,
        my[:, 0] + 2 * np.sqrt(vy_noiseless[:, 0]),
        my[:, 0] - 2 * np.sqrt(vy_noiseless[:, 0]),
        alpha=0.3, edgecolor='b', facecolor='b')
    ax.fill_between(
        ttest,
        my[:, 0] + 2 * np.sqrt(vy[:, 0]),
        my[:, 0] - 2 * np.sqrt(vy[:, 0]),
        alpha=0.1, edgecolor='b', facecolor='b')
    ax.plot(ttest, y_test, 'ro')
    ax.set_xlim([T_train - 5, T_train + T])
    plt.title(plot_title)
    plt.savefig('/tmp/kink_pred_MM_' + plot_title + '.pdf')
    # plt.savefig('/tmp/kink_pred_MM_'+plot_title+'.png')
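The function above depends on a trained model's predict_forward, so it cannot run on its own; the following sketch reproduces just the ±2 standard deviation fill_between band with synthetic mean and variance arrays (all values are made up for illustration).

import numpy as np
import matplotlib.pyplot as plt

# Synthetic predictive mean and variance over 50 future steps
t = np.arange(50)
my = np.sin(0.2 * t)               # stand-in for the predictive mean
vy = 0.05 + 0.01 * t               # stand-in for the predictive variance

fig, ax = plt.subplots()
ax.plot(t, my, '-', color='b')
ax.fill_between(t,
                my + 2 * np.sqrt(vy),
                my - 2 * np.sqrt(vy),
                alpha=0.2, edgecolor='b', facecolor='b')
plt.savefig('/tmp/band_demo.pdf')  # placeholder path
plt.close(fig)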
def plot(params_dir):
    model_dirs = [name for name in os.listdir(params_dir)
                  if os.path.isdir(os.path.join(params_dir, name))]
    df = defaultdict(list)
    for model_dir in model_dirs:
        df[re.sub('_bin_scaled_mono_True_ratio', '', model_dir)] = [
            dd.io.load(path)['best_epoch']['validate_objective']
            for path in glob.glob(os.path.join(
                params_dir, model_dir) + '/*.h5')]
    df = pd.DataFrame(dict([(k, pd.Series(v)) for k, v in df.items()]))
    df.to_csv(os.path.basename(os.path.normpath(params_dir)))
    plt.figure(figsize=(16, 4), dpi=300)
    g = sns.boxplot(data=df)
    g.set_xticklabels(df.columns, rotation=45)
    plt.tight_layout()
    plt.savefig('{}_errors_box_plot.png'.format(
        os.path.join(IMAGES_DIRECTORY,
                     os.path.basename(os.path.normpath(params_dir)))))
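Since the .h5 result files and IMAGES_DIRECTORY are specific to that project, here is a minimal sketch of just the boxplot-and-save step with an in-memory DataFrame; the column names, values, and output path are assumptions.

import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt

# Stand-in for the per-model validation objectives loaded from the .h5 files
rng = np.random.default_rng(2)
df = pd.DataFrame({'model_a': rng.normal(0.30, 0.02, 20),
                   'model_b': rng.normal(0.28, 0.03, 20)})

plt.figure(figsize=(16, 4), dpi=300)
g = sns.boxplot(data=df)            # one box per column
g.set_xticklabels(df.columns, rotation=45)
plt.tight_layout()
plt.savefig('errors_box_plot.png')  # placeholder for the IMAGES_DIRECTORY path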
def plot_confusion_matrix(cm, label_list, title='Confusion matrix', cmap=None):
    from matplotlib import pylab
    cm = np.asarray(cm, dtype=np.float32)
    for i, row in enumerate(cm):
        cm[i] = cm[i] / np.sum(cm[i])
    #import matplotlib.pyplot as plt
    #plt.ion()
    pylab.clf()
    pylab.matshow(cm, fignum=False, cmap='Blues', vmin=0, vmax=1.0)
    ax = pylab.axes()
    ax.set_xticks(range(len(label_list)))
    ax.set_xticklabels(label_list, rotation='vertical')
    ax.xaxis.set_ticks_position('bottom')
    ax.set_yticks(range(len(label_list)))
    ax.set_yticklabels(label_list)
    pylab.title(title)
    pylab.colorbar()
    pylab.grid(False)
    pylab.xlabel('Predicted class')
    pylab.ylabel('True class')
    pylab.grid(False)
    pylab.savefig('test.jpg')
    pylab.show()
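One way to feed this function is with a raw count matrix from scikit-learn's confusion_matrix; a short sketch, assuming plot_confusion_matrix above is defined in the current session (the toy labels are made up).

from sklearn.metrics import confusion_matrix

# Toy labels; the function above normalizes each row itself
y_true = ['cat', 'cat', 'dog', 'dog', 'bird', 'bird', 'bird']
y_pred = ['cat', 'dog', 'dog', 'dog', 'bird', 'cat', 'bird']
labels = ['bird', 'cat', 'dog']

cm = confusion_matrix(y_true, y_pred, labels=labels)
plot_confusion_matrix(cm, labels, title='Toy confusion matrix')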
def plot_1d(dataset, nbins, data):
    with sns.axes_style('white'):
        plt.rc('font', weight='bold')
        plt.rc('grid', lw=2)
        plt.rc('lines', lw=3)
        plt.figure(1)
        plt.hist(data, bins=np.arange(nbins + 1), color='blue')
        plt.ylabel('Count', weight='bold', fontsize=24)
        xticks = list(plt.gca().get_xticks())
        while (nbins - 1) / float(xticks[-1]) < 1.1:
            xticks = xticks[:-1]
        while xticks[0] < 0:
            xticks = xticks[1:]
        xticks.append(nbins - 1)
        xticks = list(sorted(xticks))
        plt.gca().set_xticks(xticks)
        plt.xlim([int(np.ceil(-0.05 * nbins)), int(np.ceil(nbins * 1.05))])
        plt.legend(loc='upper right')
        plt.savefig('plots/marginals-{0}.pdf'.format(dataset.replace('_', '-')), bbox_inches='tight')
        plt.clf()
        plt.close()
def plot_2d(dataset, nbins, data, extra=None):
    with sns.axes_style('white'):
        plt.rc('font', weight='bold')
        plt.rc('grid', lw=2)
        plt.rc('lines', lw=2)
        rows, cols = nbins
        im = np.zeros(nbins)
        for i in range(rows):
            for j in range(cols):
                im[i, j] = ((data[:, 0] == i) & (data[:, 1] == j)).sum()
        plt.imshow(im, cmap='gray_r', interpolation='none')
        if extra is not None:
            dataset += extra
        plt.savefig('plots/marginals-{0}.pdf'.format(dataset.replace('_', '-')), bbox_inches='tight')
        plt.clf()
        plt.close()
def plot_1d(dataset, nbins):
    data = np.loadtxt('experiments/uci/data/splits/{0}_all.csv'.format(dataset), skiprows=1, delimiter=',')[:, -1]
    with sns.axes_style('white'):
        plt.rc('font', weight='bold')
        plt.rc('grid', lw=2)
        plt.rc('lines', lw=3)
        plt.figure(1)
        plt.hist(data, bins=np.arange(nbins + 1), color='blue')
        plt.ylabel('Count', weight='bold', fontsize=24)
        xticks = list(plt.gca().get_xticks())
        while (nbins - 1) / float(xticks[-1]) < 1.1:
            xticks = xticks[:-1]
        while xticks[0] < 0:
            xticks = xticks[1:]
        xticks.append(nbins - 1)
        xticks = list(sorted(xticks))
        plt.gca().set_xticks(xticks)
        plt.xlim([int(np.ceil(-0.05 * nbins)), int(np.ceil(nbins * 1.05))])
        plt.legend(loc='upper right')
        plt.savefig('plots/marginals-{0}.pdf'.format(dataset.replace('_', '-')), bbox_inches='tight')
        plt.clf()
        plt.close()
def plot_2d(dataset, nbins, data=None, extra=None):
    if data is None:
        data = np.loadtxt('experiments/uci/data/splits/{0}_all.csv'.format(dataset), skiprows=1, delimiter=',')[:, -2:]
    with sns.axes_style('white'):
        plt.rc('font', weight='bold')
        plt.rc('grid', lw=2)
        plt.rc('lines', lw=2)
        rows, cols = nbins
        im = np.zeros(nbins)
        for i in range(rows):
            for j in range(cols):
                im[i, j] = ((data[:, 0] == i) & (data[:, 1] == j)).sum()
        plt.imshow(im, cmap='gray_r', interpolation='none')
        if extra is not None:
            dataset += extra
        plt.savefig('plots/marginals-{0}.pdf'.format(dataset.replace('_', '-')), bbox_inches='tight')
        plt.clf()
        plt.close()
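The nested loop above builds the 2-D count image one cell at a time; under the assumption that the two columns hold integer bin indices in [0, rows) and [0, cols), np.histogram2d produces the same image in one call, as in this sketch (synthetic data, placeholder filename).

import numpy as np
import matplotlib.pyplot as plt

# Synthetic integer-coded data: column 0 in [0, rows), column 1 in [0, cols)
rows, cols = 8, 10
rng = np.random.default_rng(3)
data = np.column_stack([rng.integers(0, rows, 1000),
                        rng.integers(0, cols, 1000)])

# Same counts as the nested loop: occurrences of each (i, j) pair
im, _, _ = np.histogram2d(data[:, 0], data[:, 1],
                          bins=[np.arange(rows + 1), np.arange(cols + 1)])

plt.imshow(im, cmap='gray_r', interpolation='none')
plt.savefig('marginals_demo.pdf', bbox_inches='tight')
plt.close()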
def plotValueFunction(self, valueFunction, prefix):
    '''3d plot of a value function.'''
    fig, ax = plt.subplots(subplot_kw=dict(projection='3d'))
    X, Y = np.meshgrid(np.arange(self.numCols), np.arange(self.numRows))
    Z = valueFunction.reshape(self.numRows, self.numCols)
    # Reverse each row of X so the surface is mirrored left-to-right
    for i in range(len(X)):
        for j in range(len(X[i]) // 2):
            tmp = X[i][j]
            X[i][j] = X[i][len(X[i]) - j - 1]
            X[i][len(X[i]) - j - 1] = tmp
    my_col = cm.jet(np.random.rand(Z.shape[0], Z.shape[1]))
    ax.plot_surface(X, Y, Z, rstride=1, cstride=1,
                    cmap=plt.get_cmap('jet'))
    plt.gca().view_init(elev=30, azim=30)
    plt.savefig(self.outputPath + prefix + 'value_function.png')
    plt.close()
def plotLine(self, x_vals, y_vals, x_label, y_label, title, filename=None):
    plt.clf()
    plt.xlabel(x_label)
    plt.xlim(((min(x_vals) - 0.5), (max(x_vals) + 0.5)))
    plt.ylabel(y_label)
    plt.ylim(((min(y_vals) - 0.5), (max(y_vals) + 0.5)))
    plt.title(title)
    plt.plot(x_vals, y_vals, c='k', lw=2)
    #plt.plot(x_vals, len(x_vals) * y_vals[0], c='r', lw=2)
    if filename is None:
        plt.show()
    else:
        plt.savefig(self.outputPath + filename)
Source: demo_mi.py (project: Building-Machine-Learning-Systems-With-Python-Second-Edition, author: PacktPublishing)
def plot_entropy():
    pylab.clf()
    pylab.figure(num=None, figsize=(5, 4))
    title = "Entropy $H(X)$"
    pylab.title(title)
    pylab.xlabel("$P(X=$coin will show heads up$)$")
    pylab.ylabel("$H(X)$")
    pylab.xlim(xmin=0, xmax=1.1)
    x = np.arange(0.001, 1, 0.001)
    y = -x * np.log2(x) - (1 - x) * np.log2(1 - x)
    pylab.plot(x, y)
    # pylab.xticks([w*7*24 for w in [0,1,2,3,4]], ['week %i'%(w+1) for w in
    # [0,1,2,3,4]])
    pylab.autoscale(tight=True)
    pylab.grid(True)
    filename = "entropy_demo.png"
    pylab.savefig(os.path.join(CHART_DIR, filename), bbox_inches="tight")
Source: utils.py (project: Building-Machine-Learning-Systems-With-Python-Second-Edition, author: PacktPublishing)
def plot_feat_importance(feature_names, clf, name):
    pylab.clf()
    coef_ = clf.coef_
    important = np.argsort(np.absolute(coef_.ravel()))
    f_imp = feature_names[important]
    coef = coef_.ravel()[important]
    inds = np.argsort(coef)
    f_imp = f_imp[inds]
    coef = coef[inds]
    xpos = np.array(range(len(coef)))
    pylab.bar(xpos, coef, width=1)
    pylab.title('Feature importance for %s' % (name))
    ax = pylab.gca()
    ax.set_xticks(np.arange(len(coef)))
    labels = ax.set_xticklabels(f_imp)
    for label in labels:
        label.set_rotation(90)
    filename = name.replace(" ", "_")
    pylab.savefig(os.path.join(
        CHART_DIR, "feat_imp_%s.png" % filename), bbox_inches="tight")
Source: utils.py (project: Building-Machine-Learning-Systems-With-Python-Second-Edition, author: PacktPublishing)
def plot_feat_importance(feature_names, clf, name):
    pylab.figure(num=None, figsize=(6, 5))
    coef_ = clf.coef_
    important = np.argsort(np.absolute(coef_.ravel()))
    f_imp = feature_names[important]
    coef = coef_.ravel()[important]
    inds = np.argsort(coef)
    f_imp = f_imp[inds]
    coef = coef[inds]
    xpos = np.array(list(range(len(coef))))
    pylab.bar(xpos, coef, width=1)
    pylab.title('Feature importance for %s' % (name))
    ax = pylab.gca()
    ax.set_xticks(np.arange(len(coef)))
    labels = ax.set_xticklabels(f_imp)
    for label in labels:
        label.set_rotation(90)
    filename = name.replace(" ", "_")
    pylab.savefig(os.path.join(
        CHART_DIR, "feat_imp_%s.png" % filename), bbox_inches="tight")
Source: utils.py (project: Building-Machine-Learning-Systems-With-Python-Second-Edition, author: PacktPublishing)
def plot_confusion_matrix(cm, genre_list, name, title):
    pylab.clf()
    pylab.matshow(cm, fignum=False, cmap='Blues', vmin=0, vmax=1.0)
    ax = pylab.axes()
    ax.set_xticks(range(len(genre_list)))
    ax.set_xticklabels(genre_list)
    ax.xaxis.set_ticks_position("bottom")
    ax.set_yticks(range(len(genre_list)))
    ax.set_yticklabels(genre_list)
    pylab.title(title)
    pylab.colorbar()
    pylab.grid(False)
    pylab.show()
    pylab.xlabel('Predicted class')
    pylab.ylabel('True class')
    pylab.grid(False)
    pylab.savefig(
        os.path.join(CHART_DIR, "confusion_matrix_%s.png" % name), bbox_inches="tight")
Source: utils.py (project: Building-Machine-Learning-Systems-With-Python-Second-Edition, author: PacktPublishing)
def plot_roc(auc_score, name, tpr, fpr, label=None):
    pylab.clf()
    pylab.figure(num=None, figsize=(5, 4))
    pylab.grid(True)
    pylab.plot([0, 1], [0, 1], 'k--')
    pylab.plot(fpr, tpr)
    pylab.fill_between(fpr, tpr, alpha=0.5)
    pylab.xlim([0.0, 1.0])
    pylab.ylim([0.0, 1.0])
    pylab.xlabel('False Positive Rate')
    pylab.ylabel('True Positive Rate')
    pylab.title('ROC curve (AUC = %0.2f) / %s' %
                (auc_score, label), verticalalignment="bottom")
    pylab.legend(loc="lower right")
    filename = name.replace(" ", "_")
    pylab.savefig(
        os.path.join(CHART_DIR, "roc_" + filename + ".png"), bbox_inches="tight")
def plotKChart(self, misClassDict, saveFigPath):
    kList = []
    misRateList = []
    for k, misClassNum in misClassDict.items():
        kList.append(k)
        misRateList.append(1.0 - 1.0 / k * misClassNum)
    fig = plt.figure(saveFigPath)
    plt.plot(kList, misRateList, 'r--')
    plt.title(saveFigPath)
    plt.xlabel('k Num.')
    plt.ylabel('Misclassified Rate')
    plt.legend([saveFigPath])
    plt.grid(True)
    plt.savefig(saveFigPath)
    plt.show()
################################### PART3 TEST ########################################
def test_plot_timeseries2():
    filename = abspath(join(testdir, 'plot_timeseries2.png'))
    if isfile(filename):
        os.remove(filename)
    periods = 5
    index = pd.date_range('1/1/2016', periods=periods, freq='H')
    data = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12], [13, 14, 15]])
    df = pd.DataFrame(data=data, index=index, columns=['A', 'B', 'C'])
    tfilter = pd.Series(data=(df.index < index[3]), index=df.index)
    plt.figure()
    pecos.graphics.plot_timeseries(df, tfilter, yaxis_min=0, yaxis_max=20)
    plt.savefig(filename, format='png')
    plt.close()
    assert_true(isfile(filename))
def test_plot_heatmap1():
    filename = abspath(join(testdir, 'plot_heatmap1.png'))
    if isfile(filename):
        os.remove(filename)
    periods = 5
    index = pd.date_range('1/1/2016', periods=periods, freq='D')
    data = np.random.rand(periods, 4)
    df = pd.DataFrame(data=data, index=index, columns=['A', 'B', 'C', 'D'])
    plt.figure()
    pecos.graphics.plot_heatmap(df)
    plt.savefig(filename, format='png', bbox_inches='tight', pad_inches=0)
    plt.close()
    assert_true(isfile(filename))
def test_plot_doy_heatmap1():
    filename = abspath(join(testdir, 'plot_doy_heatmap1.png'))
    if isfile(filename):
        os.remove(filename)
    periods = 5 * 24  # 5 days
    index = pd.date_range('3/1/2016', periods=periods, freq='H')
    data = np.random.rand(periods)
    df = pd.DataFrame(data=data, index=index, columns=['A'])
    plt.figure()
    pecos.graphics.plot_doy_heatmap(df['A'])
    plt.savefig(filename, format='png')
    plt.close()
    assert_true(isfile(filename))
def test_plot_doy_heatmap2():
    filename = abspath(join(testdir, 'plot_doy_heatmap2.png'))
    if isfile(filename):
        os.remove(filename)
    periods = 365 * 12
    index = pd.date_range('1/1/2016', periods=periods, freq='2H')
    data = np.random.rand(periods)
    df = pd.DataFrame(data=data, index=index, columns=['A'])
    overlay = pd.DataFrame(index=[1, 100, 200, 300, 365],
                           data={'A': [40000, 20000, 60000, 10000, 5000],
                                 'B': [60000, 70000, 75000, 50000, 65000]})
    plt.figure()
    pecos.graphics.plot_doy_heatmap(df['A'], cmap='gray', overlay=overlay)
    plt.savefig(filename, format='png')
    plt.close()
    assert_true(isfile(filename))
def fit_data():
    data = np.loadtxt('data.dat')
    print(data)
    params = dict()
    params["c"] = {"min": -np.inf, "max": np.inf}
    result = qudi_fitting.make_lorentzian_fit(axis=data[:, 0], data=data[:, 3], add_parameters=params)
    print(result.fit_report())
    plt.plot(data[:, 0], -data[:, 3] + 2, "b-o", label="data mean")
    # plt.plot(data[:,0],data[:,1],label="data")
    # plt.plot(data[:,0],data[:,2],label="data")
    plt.plot(data[:, 0], -result.best_fit + 2, "r-", linewidth=2., label="fit")
    # plt.plot(data[:,0],result.init_fit,label="init")
    plt.xlabel("time (ns)")
    plt.ylabel("polarization transfer (arb. u.)")
    plt.legend(loc=1)
    # plt.savefig("pol20_24repetition_pol.pdf")
    # plt.savefig("pol20_24repetition_pol.png")
    plt.show()
    savedata = [[data[ii, 0], -data[ii, 3] + 2, -result.best_fit[ii] + 2] for ii in range(len(data[:, 0]))]
    np.savetxt("pol_data_fit.csv", savedata)
    # print(result.params)
    print(result.params)
def plot_confusion_matrix(cm, plot_title, filename, genres=None):
    if not genres:
        genres = GENRES
    pylab.clf()
    pylab.matshow(cm, fignum=False, cmap='Blues', vmin=0, vmax=100.0)
    axes = pylab.axes()
    axes.set_xticks(range(len(genres)))
    axes.set_xticklabels(genres, rotation=45)
    axes.set_yticks(range(len(genres)))
    axes.set_yticklabels(genres)
    axes.xaxis.set_ticks_position("bottom")
    pylab.title(plot_title, fontsize=14)
    pylab.colorbar()
    pylab.xlabel('Predicted class', fontsize=12)
    pylab.ylabel('Correct class', fontsize=12)
    pylab.grid(False)
    #pylab.show()
    pylab.savefig(os.path.join(PLOTS_DIR, "cm_%s.eps" % filename), bbox_inches="tight")
def nmf(fdoc, fvocab):
    T = 100
    nmf = NMF(fdoc, fvocab)
    nmf.train(T)
    nmf.get_words()
    # print(mf.R)
    plt.figure()
    plt.plot(range(1, T + 1), nmf.objective)
    plt.xticks(np.linspace(1, T, 10))
    plt.xlabel('Iterations')
    plt.ylabel('Objective')
    plt.title('Variation of objective with iterations')
    plt.savefig('hw5_2a.png')
    plt.show()
def gp_partd(Xtrain, ytrain, Xtest, ytest):
    gp = gaussian_process(Xtrain[:, 3], ytrain, Xtrain[:, 3], ytrain)
    gp.init_kernel_matrices(b=5, var=2)
    gp.predict_test()
    x = np.asarray(Xtrain[:, 3]).flatten()
    xsortind = np.argsort(x)
    y1 = np.asarray(ytrain).flatten()
    y2 = np.asarray(gp.test_predictions).flatten()
    plt.figure()
    plt.scatter(x[xsortind], y1[xsortind])
    plt.plot(x[xsortind], y2[xsortind], 'b-')
    plt.xlabel('Car Weight (Dimension 4)')
    plt.ylabel('Outcome')
    plt.title('Visualizing model through single dimension')
    plt.savefig('hw3_gaussian_dim4_viz')
    plt.show()
def test_plot_fragility_curve1():
    from scipy.stats import lognorm
    filename = abspath(join(testdir, 'plot_fragility_curve1.png'))
    if isfile(filename):
        os.remove(filename)
    FC = wntr.scenario.FragilityCurve()
    FC.add_state('Minor', 1, {'Default': lognorm(0.5, scale=0.3)})
    FC.add_state('Major', 2, {'Default': lognorm(0.5, scale=0.7)})
    plt.figure()
    wntr.graphics.plot_fragility_curve(FC)
    plt.savefig(filename, format='png')
    plt.close()
    assert_true(isfile(filename))
def plot_true_and_augmented_data(sample, noised_sample, label, n_examples):
    output_dir = os.path.split(FLAGS.output)[0]
    # Save augmented data
    plt.clf()
    fig, ax = plt.subplots(3, 1)
    for t in range(noised_sample.shape[1]):
        ax[t].plot(noised_sample[:, t])
        ax[t].set_xlabel('time (samples)')
        ax[t].set_ylabel('amplitude')
    ax[0].set_title('window {:03d}, cluster_id: {}'.format(n_examples, label))
    plt.savefig(os.path.join(output_dir, "augmented_data",
                             'augmented_{:03d}.pdf'.format(n_examples)))
    plt.close()
    # Save true data
    plt.clf()
    fig, ax = plt.subplots(3, 1)
    for t in range(sample.shape[1]):
        ax[t].plot(sample[:, t])
        ax[t].set_xlabel('time (samples)')
        ax[t].set_ylabel('amplitude')
    ax[0].set_title('window {:03d}, cluster_id: {}'.format(n_examples, label))
    plt.savefig(os.path.join(output_dir, "true_data",
                             'true__{:03d}.pdf'.format(n_examples)))
    plt.close()