def plot_prof_2(self, mod, species, xlim1, xlim2):
"""
Plot one species for a given cycle between mass coordinates xlim1 and xlim2.
Parameters
----------
mod : string or integer
Model to plot, same as cycle number.
species : string
Which species to plot.
xlim1, xlim2 : float
Mass coordinate range.
"""
mass=self.se.get(mod,'mass')
Xspecies=self.se.get(mod,'yps',species)
pyl.plot(mass,Xspecies,'-',label=str(mod)+', '+species)
pyl.xlim(xlim1,xlim2)
pyl.legend()
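# A self-contained sketch (not part of the original class) of the pattern
# plot_prof_2 follows: fetch a mass coordinate array and an abundance profile,
# plot them, and clamp the x-axis with xlim. Synthetic arrays stand in for the
# values normally returned by self.se.get().
import numpy as np
import matplotlib.pylab as pyl

mass = np.linspace(0.0, 2.0, 500)                            # mass coordinate
Xspecies = 1e-3 + 0.7 * np.exp(-((mass - 0.8) / 0.1) ** 2)   # fake abundance profile
pyl.plot(mass, Xspecies, '-', label='100, H-1')
pyl.xlim(0.5, 1.2)
pyl.legend()
pyl.show()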
def plot_volcano(logFC,p_val,sample_name,saveName,logFC_thresh):
fig=pl.figure()
## To plot and save
pl.scatter(logFC[(p_val>0.05)|(abs(logFC)<logFC_thresh)],-np.log10(p_val[(p_val>0.05)|(abs(logFC)<logFC_thresh)]),color='blue',alpha=0.5);
pl.scatter(logFC[(p_val<0.05)&(abs(logFC)>logFC_thresh)],-np.log10(p_val[(p_val<0.05)&(abs(logFC)>logFC_thresh)]),color='red');
pl.hlines(-np.log10(0.05),min(logFC),max(logFC))
pl.vlines(-logFC_thresh,min(-np.log10(p_val)),max(-np.log10(p_val)))
pl.vlines(logFC_thresh,min(-np.log10(p_val)),max(-np.log10(p_val)))
pl.xlim(-3,3)
pl.xlabel('Log Fold Change')
pl.ylabel('-log10(p-value)')
pl.savefig(saveName)
pl.close(fig)
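# Hypothetical usage of plot_volcano with synthetic data; assumes the function
# above is in scope together with its module-level `pl` (pylab) and `np` imports.
import numpy as np
logFC = np.random.randn(1000)                 # simulated log fold changes
p_val = np.random.uniform(0.001, 1.0, 1000)   # simulated p-values
plot_volcano(logFC, p_val, 'demo_sample', 'volcano_demo.png', logFC_thresh=1.0)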
# def plot_histograms(df_peaks,pntr_list):
#
# for pntr in pntr_list:
# colName =pntr[2]+'_Intragenic_position'
# pl.hist(df_peaks[colName])
# pl.xlabel(colName)
# pl.ylabel()
# pl.show()
def plot(m, Xtrain, ytrain):
xx = np.linspace(-0.5, 1.5, 100)[:, None]
mean, var = m.predict_y(xx)
mean = np.reshape(mean, (xx.shape[0], 1))
var = np.reshape(var, (xx.shape[0], 1))
if isinstance(m, aep.SDGPR):
zu = m.sgp_layers[0].zu
elif isinstance(m, vfe.SGPR_collapsed):
zu = m.zu
else:
zu = m.sgp_layer.zu
mean_u, var_u = m.predict_f(zu)
plt.figure()
plt.plot(Xtrain, ytrain, 'kx', mew=2)
plt.plot(xx, mean, 'b', lw=2)
# pdb.set_trace()
plt.fill_between(
xx[:, 0],
mean[:, 0] - 2 * np.sqrt(var[:, 0]),
mean[:, 0] + 2 * np.sqrt(var[:, 0]),
color='blue', alpha=0.2)
plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
plt.xlim(-0.1, 1.1)
def test_plot_error_ellipse(self):
# Generate random data
x = np.random.normal(0, 1, 300)
s = np.array([2.0, 2.0])
y1 = np.random.normal(s[0] * x)
y2 = np.random.normal(s[1] * x)
data = np.array([y1, y2])
# Calculate covariance and plot error ellipse
cov = np.cov(data)
plot_error_ellipse([0.0, 0.0], cov)
debug = False
if debug:
plt.scatter(data[0, :], data[1, :])
plt.xlim([-8, 8])
plt.ylim([-8, 8])
plt.show()
plt.clf()
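# A minimal sketch of one common way to draw an n-sigma error ellipse from a
# 2x2 covariance matrix, for context on the test above. This is an assumption
# about the underlying idea, not the library's actual plot_error_ellipse.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.patches import Ellipse

def draw_error_ellipse(mean, cov, n_std=2.0, ax=None):
    # Principal axes of the ellipse come from the eigendecomposition of cov.
    eigvals, eigvecs = np.linalg.eigh(cov)
    order = eigvals.argsort()[::-1]
    eigvals, eigvecs = eigvals[order], eigvecs[:, order]
    angle = np.degrees(np.arctan2(eigvecs[1, 0], eigvecs[0, 0]))
    width, height = 2.0 * n_std * np.sqrt(eigvals)   # full axis lengths
    if ax is None:
        ax = plt.gca()
    ax.add_patch(Ellipse(xy=mean, width=width, height=height,
                         angle=angle, fill=False, color='k'))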
def plot_1d(dataset, nbins, data):
with sns.axes_style('white'):
plt.rc('font', weight='bold')
plt.rc('grid', lw=2)
plt.rc('lines', lw=3)
plt.figure(1)
plt.hist(data, bins=np.arange(nbins+1), color='blue')
plt.ylabel('Count', weight='bold', fontsize=24)
xticks = list(plt.gca().get_xticks())
while (nbins-1) / float(xticks[-1]) < 1.1:
xticks = xticks[:-1]
while xticks[0] < 0:
xticks = xticks[1:]
xticks.append(nbins-1)
xticks = list(sorted(xticks))
plt.gca().set_xticks(xticks)
plt.xlim([int(np.ceil(-0.05*nbins)),int(np.ceil(nbins*1.05))])
plt.legend(loc='upper right')
plt.savefig('plots/marginals-{0}.pdf'.format(dataset.replace('_','-')), bbox_inches='tight')
plt.clf()
plt.close()
def plot_1d(dataset, nbins):
data = np.loadtxt('experiments/uci/data/splits/{0}_all.csv'.format(dataset), skiprows=1, delimiter=',')[:,-1]
with sns.axes_style('white'):
plt.rc('font', weight='bold')
plt.rc('grid', lw=2)
plt.rc('lines', lw=3)
plt.figure(1)
plt.hist(data, bins=np.arange(nbins+1), color='blue')
plt.ylabel('Count', weight='bold', fontsize=24)
xticks = list(plt.gca().get_xticks())
while (nbins-1) / float(xticks[-1]) < 1.1:
xticks = xticks[:-1]
while xticks[0] < 0:
xticks = xticks[1:]
xticks.append(nbins-1)
xticks = list(sorted(xticks))
plt.gca().set_xticks(xticks)
plt.xlim([int(np.ceil(-0.05*nbins)),int(np.ceil(nbins*1.05))])
plt.legend(loc='upper right')
plt.savefig('plots/marginals-{0}.pdf'.format(dataset.replace('_','-')), bbox_inches='tight')
plt.clf()
plt.close()
def plotLine(self, x_vals, y_vals, x_label, y_label, title, filename=None):
plt.clf()
plt.xlabel(x_label)
plt.xlim(((min(x_vals) - 0.5), (max(x_vals) + 0.5)))
plt.ylabel(y_label)
plt.ylim(((min(y_vals) - 0.5), (max(y_vals) + 0.5)))
plt.title(title)
plt.plot(x_vals, y_vals, c='k', lw=2)
#plt.plot(x_vals, len(x_vals) * y_vals[0], c='r', lw=2)
if filename is None:
plt.show()
else:
plt.savefig(self.outputPath + filename)
# Source: demo_mi.py, project: Building-Machine-Learning-Systems-With-Python-Second-Edition, author: PacktPublishing
def plot_entropy():
pylab.clf()
pylab.figure(num=None, figsize=(5, 4))
title = "Entropy $H(X)$"
pylab.title(title)
pylab.xlabel("$P(X=$coin will show heads up$)$")
pylab.ylabel("$H(X)$")
pylab.xlim(0, 1.1)
x = np.arange(0.001, 1, 0.001)
y = -x * np.log2(x) - (1 - x) * np.log2(1 - x)
pylab.plot(x, y)
# pylab.xticks([w*7*24 for w in [0,1,2,3,4]], ['week %i'%(w+1) for w in
# [0,1,2,3,4]])
pylab.autoscale(tight=True)
pylab.grid(True)
filename = "entropy_demo.png"
pylab.savefig(os.path.join(CHART_DIR, filename), bbox_inches="tight")
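# Quick numeric check of the binary entropy curve plotted above:
# H(p) = -p*log2(p) - (1-p)*log2(1-p) peaks at 1 bit for a fair coin (p = 0.5)
# and tends to 0 as p approaches 0 or 1.
import numpy as np
p = 0.5
print(-p * np.log2(p) - (1 - p) * np.log2(1 - p))  # -> 1.0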
# Source: plot_kmeans_example.py, project: Building-Machine-Learning-Systems-With-Python-Second-Edition, author: PacktPublishing
def plot_clustering(x, y, title, mx=None, ymax=None, xmin=None, km=None):
pylab.figure(num=None, figsize=(8, 6))
if km:
pylab.scatter(x, y, s=50, c=km.predict(list(zip(x, y))))
else:
pylab.scatter(x, y, s=50)
pylab.title(title)
pylab.xlabel("Occurrence word 1")
pylab.ylabel("Occurrence word 2")
pylab.autoscale(tight=True)
pylab.ylim(0, 1)
pylab.xlim(0, 1)
pylab.grid(True, linestyle='-', color='0.75')
return pylab
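# Hypothetical usage of plot_clustering with synthetic data and scikit-learn's
# KMeans; assumes the function above (and its pylab import) is in scope.
import numpy as np
from sklearn.cluster import KMeans

rng = np.random.RandomState(0)
x, y = rng.rand(60), rng.rand(60)
km = KMeans(n_clusters=3, n_init=10, random_state=0).fit(list(zip(x, y)))
chart = plot_clustering(x, y, "k-means demo", km=km)
chart.show()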
# Source: utils.py, project: Building-Machine-Learning-Systems-With-Python-Second-Edition, author: PacktPublishing
def plot_roc(auc_score, name, tpr, fpr, label=None):
pylab.clf()
pylab.figure(num=None, figsize=(5, 4))
pylab.grid(True)
pylab.plot([0, 1], [0, 1], 'k--')
pylab.plot(fpr, tpr)
pylab.fill_between(fpr, tpr, alpha=0.5)
pylab.xlim([0.0, 1.0])
pylab.ylim([0.0, 1.0])
pylab.xlabel('False Positive Rate')
pylab.ylabel('True Positive Rate')
pylab.title('ROC curve (AUC = %0.2f) / %s' %
(auc_score, label), verticalalignment="bottom")
pylab.legend(loc="lower right")
filename = name.replace(" ", "_")
pylab.savefig(
os.path.join(CHART_DIR, "roc_" + filename + ".png"), bbox_inches="tight")
def plot_roc(y_test, y_pred, label=''):
"""Compute ROC curve and ROC area"""
fpr, tpr, _ = roc_curve(y_test, y_pred)
roc_auc = auc(fpr, tpr)
# Plot of a ROC curve for a specific class
plt.figure()
plt.plot(fpr, tpr, label='ROC curve (area = %0.2f)' % roc_auc)
plt.plot([0, 1], [0, 1], 'k--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver operating characteristic' + label)
plt.legend(loc="lower right")
plt.show()
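# Hypothetical usage of plot_roc with a small hand-made example; assumes the
# function above and sklearn.metrics.roc_curve / auc are already imported.
import numpy as np
y_test = np.array([0, 0, 1, 1, 0, 1, 1, 0])
y_score = np.array([0.1, 0.4, 0.35, 0.8, 0.2, 0.7, 0.9, 0.3])
plot_roc(y_test, y_score, label=' (demo)')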
def plotSpeedupFigure(AllInfo, maxWorker=1, **kwargs):
pylab.figure(2)
xs = AllInfo['nWorker']
ts_mono = AllInfo['t_monolithic']
xgrid = np.linspace(0, maxWorker + 0.1, 100)
pylab.plot(xgrid, xgrid, 'y--', label='ideal parallel')
for method in getMethodNames(**kwargs):
speedupRatio = ts_mono / AllInfo['t_' + method]
pylab.plot(xs, speedupRatio, 'o-',
label=method,
color=ColorMap[method],
markeredgecolor=ColorMap[method])
pylab.xlim([-0.2, maxWorker + 0.5])
pylab.ylim([0, maxWorker + 0.5])
pylab.legend(loc='upper left')
pylab.xlabel('Number of Workers')
pylab.ylabel('Speedup over Monolithic')
def plotBoundVsAlph(alphaVals=np.linspace(.001, 3, 1000),
beta1=0.5):
exactVals = cD_exact(alphaVals, beta1)
boundVals = cD_bound(alphaVals, beta1)
assert np.all(exactVals >= boundVals)
pylab.plot(alphaVals, exactVals, 'k-', linewidth=LINEWIDTH)
pylab.plot(alphaVals, boundVals, 'r--', linewidth=LINEWIDTH)
pylab.xlabel("alpha", fontsize=FONTSIZE)
pylab.ylabel(" ", fontsize=FONTSIZE)
pylab.xlim([np.min(alphaVals) - 0.1, np.max(alphaVals) + 0.1])
pylab.ylim([np.min(exactVals) - 0.05, np.max(exactVals) + 0.05])
pylab.xticks(np.arange(np.max(alphaVals) + 1))
pylab.legend(['c_D exact',
'c_D surrogate'],
fontsize=LEGENDSIZE,
loc='lower right')
pylab.tick_params(axis='both', which='major', labelsize=TICKSIZE)
def showExampleDocs(pylab=None, nrows=3, ncols=3):
if pylab is None:
from matplotlib import pylab
Data = get_data(seed=0, nObsPerDoc=200)
PRNG = np.random.RandomState(0)
chosenDocs = PRNG.choice(Data.nDoc, nrows * ncols, replace=False)
for ii, d in enumerate(chosenDocs):
start = Data.doc_range[d]
stop = Data.doc_range[d + 1]
Xd = Data.X[start:stop]
pylab.subplot(nrows, ncols, ii + 1)
pylab.plot(Xd[:, 0], Xd[:, 1], 'k.')
pylab.axis('image')
pylab.xlim([-1.5, 1.5])
pylab.ylim([-1.5, 1.5])
pylab.xticks([])
pylab.yticks([])
pylab.tight_layout()
# Set Toy Parameters
###########################################################
def _xlimrev(self):
"""Reverse the x-axis range."""
xmax, xmin = pyl.xlim()  # xlim() returns (left, right); the names are bound swapped on purpose
pyl.xlim(xmin, xmax)     # passing them back reversed flips the axis direction
def plot_prof_1(self, mod, species, xlim1, xlim2, ylim1, ylim2,
symbol=None):
"""
Plot one species for a given cycle between mass coordinates xlim1 and xlim2.
Parameters
----------
mod : string or integer
Model to plot, same as cycle number.
species : list
Which species to plot.
xlim1, xlim2 : float
Mass coordinate range.
ylim1, ylim2 : float
Mass fraction coordinate range.
symbol : string, optional
Which symbol you want to use. If None symbol is set to '-'.
The default is None.
"""
DataPlot.plot_prof_1(self,species,mod,xlim1,xlim2,ylim1,ylim2,symbol)
"""
tot_mass=self.se.get(mod,'total_mass')
age=self.se.get(mod,'age')
mass=self.se.get(mod,'mass')
Xspecies=self.se.get(mod,'iso_massf',species)
pyl.plot(mass,np.log10(Xspecies),'-',label=species)
pyl.xlim(xlim1,xlim2)
pyl.ylim(ylim1,ylim2)
pyl.legend()
pl.xlabel('$Mass$ $coordinate$', fontsize=20)
pl.ylabel('$X_{i}$', fontsize=20)
pl.title('Mass='+str(tot_mass)+', Time='+str(age)+' years, cycle='+str(mod))
"""
def plot_prof_sparse(self, mod, species, xlim1, xlim2, ylim1, ylim2,
sparse, symbol):
"""
Plot one species for a given cycle between xlim1 and xlim2.
Parameters
----------
species : list
Which species to plot.
mod : string or integer
Model (cycle) to plot.
xlim1, xlim2 : float
Mass coordinate range.
ylim1, ylim2 : float
Mass fraction coordinate range.
sparse : integer
Sparsity factor for points.
symbol : string
Which symbol to use.
"""
mass=self.se.get(mod,'mass')
Xspecies=self.se.get(mod,'yps',species)
pyl.plot(mass[0:len(mass):sparse],np.log10(Xspecies[0:len(Xspecies):sparse]),symbol)
pyl.xlim(xlim1,xlim2)
pyl.ylim(ylim1,ylim2)
pyl.legend()
def plot1D_mat(a, b, M, title=''):
""" Plot matrix M with the source and target 1D distribution
Creates a subplot with the source distribution a on the left and
target distribution b on the top. The matrix M is shown in between.
Parameters
----------
a : np.array, shape (na,)
Source distribution
b : np.array, shape (nb,)
Target distribution
M : np.array, shape (na,nb)
Matrix to plot
"""
na, nb = M.shape
gs = gridspec.GridSpec(3, 3)
xa = np.arange(na)
xb = np.arange(nb)
ax1 = pl.subplot(gs[0, 1:])
pl.plot(xb, b, 'r', label='Target distribution')
pl.yticks(())
pl.title(title)
ax2 = pl.subplot(gs[1:, 0])
pl.plot(a, xa, 'b', label='Source distribution')
pl.gca().invert_xaxis()
pl.gca().invert_yaxis()
pl.xticks(())
pl.subplot(gs[1:, 1:], sharex=ax1, sharey=ax2)
pl.imshow(M, interpolation='nearest')
pl.axis('off')
pl.xlim((0, nb))
pl.tight_layout()
pl.subplots_adjust(wspace=0., hspace=0.2)
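# Hypothetical usage of plot1D_mat in the spirit of the POT examples: two
# discrete 1D histograms and a squared-distance cost matrix between their bins.
# Assumes the function above and its pl / np / gridspec imports are in scope.
import numpy as np
na, nb = 100, 100
xa, xb = np.arange(na, dtype=float), np.arange(nb, dtype=float)
a = np.exp(-0.5 * ((xa - 20.0) / 5.0) ** 2)
a /= a.sum()                                   # source distribution
b = np.exp(-0.5 * ((xb - 60.0) / 10.0) ** 2)
b /= b.sum()                                   # target distribution
M = (xa[:, None] - xb[None, :]) ** 2           # cost matrix, shape (na, nb)
plot1D_mat(a, b, M, title='Squared-distance cost matrix')
pl.show()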
def generate_box_plot(dataset, methods, position_rmses, orientation_rmses):
num_methods = len(methods)
x_ticks = np.linspace(0., 1., num_methods)
width = 0.3 / num_methods
spacing = 0.3 / num_methods
fig, ax1 = plt.subplots()
ax1.set_ylabel('RMSE position [m]', color='b')
ax1.tick_params('y', colors='b')
fig.suptitle(
"Hand-Eye Calibration Method Error {}".format(dataset), fontsize='24')
bp_position = ax1.boxplot(position_rmses, 0, '',
positions=x_ticks - spacing, widths=width)
plt.setp(bp_position['boxes'], color='blue', linewidth=line_width)
plt.setp(bp_position['whiskers'], color='blue', linewidth=line_width)
plt.setp(bp_position['fliers'], color='blue',
marker='+', linewidth=line_width)
plt.setp(bp_position['caps'], color='blue', linewidth=line_width)
plt.setp(bp_position['medians'], color='blue', linewidth=line_width)
ax2 = ax1.twinx()
ax2.set_ylabel(r'RMSE Orientation [$^\circ$]', color='g')
ax2.tick_params('y', colors='g')
bp_orientation = ax2.boxplot(
orientation_rmses, 0, '', positions=x_ticks + spacing, widths=width)
plt.setp(bp_orientation['boxes'], color='green', linewidth=line_width)
plt.setp(bp_orientation['whiskers'], color='green', linewidth=line_width)
plt.setp(bp_orientation['fliers'], color='green',
marker='+')
plt.setp(bp_orientation['caps'], color='green', linewidth=line_width)
plt.setp(bp_orientation['medians'], color='green', linewidth=line_width)
plt.xticks(x_ticks, methods)
plt.xlim(x_ticks[0] - 2.5 * spacing, x_ticks[-1] + 2.5 * spacing)
plt.show()
def generate_time_plot(methods, datasets, runtimes_per_method, colors):
num_methods = len(methods)
num_datasets = len(datasets)
x_ticks = np.linspace(0., 1., num_methods)
width = 0.6 / num_methods / num_datasets
spacing = 0.4 / num_methods / num_datasets
fig, ax1 = plt.subplots()
ax1.set_ylabel('Time [s]', color='b')
ax1.tick_params('y', colors='b')
ax1.set_yscale('log')
fig.suptitle("Hand-Eye Calibration Method Timings", fontsize='24')
handles = []
for i, dataset in enumerate(datasets):
runtimes = [runtimes_per_method[dataset][method] for method in methods]
bp = ax1.boxplot(
runtimes, 0, '',
positions=(x_ticks + (i - num_datasets / 2. + 0.5) *
spacing * 2),
widths=width)
plt.setp(bp['boxes'], color=colors[i], linewidth=line_width)
plt.setp(bp['whiskers'], color=colors[i], linewidth=line_width)
plt.setp(bp['fliers'], color=colors[i],
marker='+', linewidth=line_width)
plt.setp(bp['medians'], color=colors[i],
marker='+', linewidth=line_width)
plt.setp(bp['caps'], color=colors[i], linewidth=line_width)
handles.append(mpatches.Patch(color=colors[i], label=dataset))
plt.legend(handles=handles, loc=2)
plt.xticks(x_ticks, methods)
plt.xlim(x_ticks[0] - 2.5 * spacing * num_datasets,
x_ticks[-1] + 2.5 * spacing * num_datasets)
plt.show()
def run_regression_1D():
np.random.seed(42)
print "create dataset ..."
N = 200
X = np.random.rand(N, 1)
Y = np.sin(12 * X) + 0.5 * np.cos(25 * X) + np.random.randn(N, 1) * 0.2
# plt.plot(X, Y, 'kx', mew=2)
def plot(m):
xx = np.linspace(-0.5, 1.5, 100)[:, None]
# mean, var = m.predict_f(xx)
samples, mf, vf = m.predict_f(xx, config.PROP_MC)
zu = m.sgp_layers[0].zu
mean_u, var_u = m.predict_f(zu)
plt.figure()
plt.plot(X, Y, 'kx', mew=2)
# plt.plot(xx, mean, 'b', lw=2)
# plt.fill_between(
# xx[:, 0],
# mean[:, 0] - 2 * np.sqrt(var[:, 0]),
# mean[:, 0] + 2 * np.sqrt(var[:, 0]),
# color='blue', alpha=0.2)
plt.plot(np.tile(xx[np.newaxis, :], [200, 1]))
plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
plt.xlim(-0.1, 1.1)
# inference
print "create model and optimize ..."
M = 20
hidden_size = [2]
model = aep.SDGPR(X, Y, M, hidden_size, lik='Gaussian')
model.optimise(method='L-BFGS-B', alpha=1, maxiter=2000)
plot(model)
# plt.show()
plt.savefig('/tmp/aep_dgpr_1D.pdf')
def run_regression_1D_stoc():
np.random.seed(42)
print "create dataset ..."
N = 200
X = np.random.rand(N, 1)
Y = np.sin(12 * X) + 0.5 * np.cos(25 * X) + np.random.randn(N, 1) * 0.2
# plt.plot(X, Y, 'kx', mew=2)
def plot(m):
xx = np.linspace(-0.5, 1.5, 100)[:, None]
mean, var = m.predict_f(xx)
zu = m.sgp_layers[0].zu
mean_u, var_u = m.predict_f(zu)
plt.figure()
plt.plot(X, Y, 'kx', mew=2)
plt.plot(xx, mean, 'b', lw=2)
plt.fill_between(
xx[:, 0],
mean[:, 0] - 2 * np.sqrt(var[:, 0]),
mean[:, 0] + 2 * np.sqrt(var[:, 0]),
color='blue', alpha=0.2)
plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
plt.xlim(-0.1, 1.1)
# inference
print "create model and optimize ..."
M = 20
hidden_size = [2]
model = aep.SDGPR(X, Y, M, hidden_size, lik='Gaussian')
model.optimise(method='adam', alpha=1.0,
maxiter=50000, mb_size=M, adam_lr=0.001)
plot(model)
plt.savefig('/tmp/aep_dgpr_1D_stoc.pdf')  # save before show(); the figure is gone afterwards
plt.show()
def run_step_1D_collapsed():
np.random.seed(42)
print "create dataset ..."
N = 200
X = np.random.rand(N, 1) * 3 - 1.5
Y = step(X)
# plt.plot(X, Y, 'kx', mew=2)
def plot(m):
xx = np.linspace(-3, 3, 100)[:, None]
mean, var = m.predict_f(xx, alpha)
zu = m.zu
mean_u, var_u = m.predict_f(zu)
plt.figure()
plt.plot(X, Y, 'kx', mew=2)
plt.plot(xx, mean, 'b', lw=2)
plt.fill_between(
xx[:, 0],
mean[:, 0] - 2 * np.sqrt(var),
mean[:, 0] + 2 * np.sqrt(var),
color='blue', alpha=0.2)
plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
# no_samples = 20
# f_samples = m.sample_f(xx, no_samples)
# for i in range(no_samples):
# plt.plot(xx, f_samples[:, :, i], linewidth=0.5, alpha=0.5)
plt.xlim(-3, 3)
# inference
print "create model and optimize ..."
M = 20
alpha = 0.01
model = vfe.SGPR_collapsed(X, Y, M)
model.optimise(method='L-BFGS-B', alpha=alpha, maxiter=1000)
plot(model)
plt.show()
def run_regression_1D(nat_param=True):
np.random.seed(42)
print "create dataset ..."
N = 200
X = np.random.rand(N, 1)
Y = np.sin(12 * X) + 0.5 * np.cos(25 * X) + np.random.randn(N, 1) * 0.2
# plt.plot(X, Y, 'kx', mew=2)
def plot(m):
xx = np.linspace(-0.5, 1.5, 100)[:, None]
mean, var = m.predict_f(xx)
zu = m.sgp_layer.zu
mean_u, var_u = m.predict_f(zu)
plt.figure()
plt.plot(X, Y, 'kx', mew=2)
plt.plot(xx, mean, 'b', lw=2)
plt.fill_between(
xx[:, 0],
mean[:, 0] - 2 * np.sqrt(var[:, 0]),
mean[:, 0] + 2 * np.sqrt(var[:, 0]),
color='blue', alpha=0.2)
plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
plt.xlim(-0.1, 1.1)
# inference
print "create model and optimize ..."
M = 20
model = vfe.SGPR(X, Y, M, lik='Gaussian', nat_param=nat_param)
model.optimise(method='L-BFGS-B', maxiter=20000)
# model.optimise(method='adam', adam_lr=0.05, maxiter=2000)
plot(model)
plt.show()
def run_step_1D():
np.random.seed(42)
print "create dataset ..."
N = 200
X = np.random.rand(N, 1) * 3 - 1.5
Y = step(X)
# plt.plot(X, Y, 'kx', mew=2)
def plot(m):
xx = np.linspace(-3, 3, 100)[:, None]
mean, var = m.predict_f(xx)
zu = m.sgp_layer.zu
mean_u, var_u = m.predict_f(zu)
plt.figure()
plt.plot(X, Y, 'kx', mew=2)
plt.plot(xx, mean, 'b', lw=2)
plt.fill_between(
xx[:, 0],
mean[:, 0] - 2 * np.sqrt(var[:, 0]),
mean[:, 0] + 2 * np.sqrt(var[:, 0]),
color='blue', alpha=0.2)
plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
no_samples = 20
xx = np.linspace(-3, 3, 500)[:, None]
f_samples = m.sample_f(xx, no_samples)
for i in range(no_samples):
plt.plot(xx, f_samples[:, :, i], linewidth=0.5, alpha=0.5)
plt.xlim(-3, 3)
# inference
print "create model and optimize ..."
M = 20
model = vfe.SGPR(X, Y, M, lik='Gaussian')
model.optimise(method='L-BFGS-B', maxiter=2000)
plot(model)
plt.show()
def run_regression_1D_stoc():
np.random.seed(42)
print "create dataset ..."
N = 200
X = np.random.rand(N, 1)
Y = np.sin(12 * X) + 0.5 * np.cos(25 * X) + np.random.randn(N, 1) * 0.2
# plt.plot(X, Y, 'kx', mew=2)
def plot(m):
xx = np.linspace(-1.5, 2.5, 200)[:, None]
mean, var = m.predict_f(xx)
zu = m.sgp_layer.zu
mean_u, var_u = m.predict_f(zu)
plt.figure()
plt.plot(X, Y, 'kx', mew=2)
plt.plot(xx, mean, 'b', lw=2)
plt.fill_between(
xx[:, 0],
mean[:, 0] - 2 * np.sqrt(var[:, 0]),
mean[:, 0] + 2 * np.sqrt(var[:, 0]),
color='blue', alpha=0.2)
plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
plt.xlim(-0.1, 1.1)
# inference
print "create model and optimize ..."
M = 20
model = vfe.SGPR(X, Y, M, lik='Gaussian')
model.optimise(method='adam',
maxiter=100000, mb_size=N, adam_lr=0.001)
# plot(model)
# plt.show()
# plt.savefig('/tmp/vfe_gpr_1D_stoc.pdf')
def run_regression_1D_stoc():
np.random.seed(42)
print "create dataset ..."
N = 200
X = np.random.rand(N, 1)
Y = np.sin(12 * X) + 0.5 * np.cos(25 * X) + np.random.randn(N, 1) * 0.2
# plt.plot(X, Y, 'kx', mew=2)
def plot(m):
xx = np.linspace(-0.5, 1.5, 100)[:, None]
mean, var = m.predict_f(xx)
zu = m.sgp_layer.zu
mean_u, var_u = m.predict_f(zu)
plt.figure()
plt.plot(X, Y, 'kx', mew=2)
plt.plot(xx, mean, 'b', lw=2)
plt.fill_between(
xx[:, 0],
mean[:, 0] - 2 * np.sqrt(var[:, 0]),
mean[:, 0] + 2 * np.sqrt(var[:, 0]),
color='blue', alpha=0.2)
plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
plt.xlim(-0.1, 1.1)
# inference
print "create model and optimize ..."
M = 20
model = aep.SGPR(X, Y, M, lik='Gaussian')
model.optimise(method='adam', alpha=0.1,
maxiter=100000, mb_size=M, adam_lr=0.001)
plot(model)
plt.savefig('/tmp/aep_gpr_1D_stoc.pdf')  # save before show(); the figure is gone afterwards
plt.show()
def run_step_1D():
np.random.seed(42)
print "create dataset ..."
N = 200
X = np.random.rand(N, 1) * 3 - 1.5
Y = step(X)
# plt.plot(X, Y, 'kx', mew=2)
def plot(m):
xx = np.linspace(-3, 3, 100)[:, None]
mean, var = m.predict_y(xx)
zu = m.sgp_layer.zu
mean_u, var_u = m.predict_f(zu)
plt.figure()
plt.plot(X, Y, 'kx', mew=2)
plt.plot(xx, mean, 'b', lw=2)
plt.fill_between(
xx[:, 0],
mean[:, 0] - 2 * np.sqrt(var[:, 0]),
mean[:, 0] + 2 * np.sqrt(var[:, 0]),
color='blue', alpha=0.2)
plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
no_samples = 20
xx = np.linspace(-3, 3, 500)[:, None]
f_samples = m.sample_f(xx, no_samples)
for i in range(no_samples):
plt.plot(xx, f_samples[:, :, i], linewidth=0.5, alpha=0.5)
plt.xlim(-3, 3)
# inference
print "create model and optimize ..."
M = 20
model = aep.SGPR(X, Y, M, lik='Gaussian')
model.optimise(method='L-BFGS-B', alpha=0.9, maxiter=2000)
plot(model)
plt.savefig('/tmp/aep_gpr_step.pdf')
# plt.show()
def run_regression_1D_pep_training(stoc=False):
np.random.seed(42)
print "create dataset ..."
N = 200
X = np.random.rand(N, 1)
Y = np.sin(12 * X) + 0.5 * np.cos(25 * X) + np.random.randn(N, 1) * 0.2
# plt.plot(X, Y, 'kx', mew=2)
def plot(m):
xx = np.linspace(-0.5, 1.5, 100)[:, None]
mean, var = m.predict_f(xx)
zu = m.sgp_layer.zu
mean_u, var_u = m.predict_f(zu)
plt.figure()
plt.plot(X, Y, 'kx', mew=2)
plt.plot(xx, mean, 'b', lw=2)
plt.fill_between(
xx[:, 0],
mean[:, 0] - 2 * np.sqrt(var[:, 0]),
mean[:, 0] + 2 * np.sqrt(var[:, 0]),
color='blue', alpha=0.2)
plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
plt.xlim(-0.1, 1.1)
# inference
print "create model and optimize ..."
M = 20
alpha = 0.1
model_pep = pep.SGPR_rank_one(X, Y, M, lik='Gaussian')
if stoc:
mb_size = M
fname = '/tmp/gpr_pep_reg_stoc.pdf'
adam_lr = 0.005
else:
mb_size = N
fname = '/tmp/gpr_pep_reg.pdf'
adam_lr = 0.05
model_pep.optimise(method='adam', mb_size=mb_size, adam_lr=adam_lr, alpha=alpha, maxiter=2000)
plot(model_pep)
plt.savefig(fname)
def run_regression_1D():
np.random.seed(42)
print "create dataset ..."
N = 200
X = np.random.rand(N, 1)
Y = np.sin(12 * X) + 0.5 * np.cos(25 * X) + np.random.randn(N, 1) * 0.2
# plt.plot(X, Y, 'kx', mew=2)
def plot(m):
xx = np.linspace(-0.5, 1.5, 100)[:, None]
mean, var = m.predict_f(xx)
zu = m.sgp_layers[0].zu
mean_u, var_u = m.predict_f(zu)
plt.figure()
plt.plot(X, Y, 'kx', mew=2)
plt.plot(xx, mean, 'b', lw=2)
plt.fill_between(
xx[:, 0],
mean[:, 0] - 2 * np.sqrt(var[:, 0]),
mean[:, 0] + 2 * np.sqrt(var[:, 0]),
color='blue', alpha=0.2)
plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
plt.xlim(-0.1, 1.1)
# inference
print "create model and optimize ..."
M = 20
hidden_size = [2]
model = aep.SDGPR_H(X, Y, M, hidden_size, lik='Gaussian')
model.optimise(method='L-BFGS-B', alpha=0.5, maxiter=2000)
plot(model)
plt.savefig('/tmp/aep_dgpr_1D.pdf')  # save before show(); the figure is gone afterwards
plt.show()