# Python fill_between() example source code.
# Common imports assumed by the snippets on this page; the aep, vfe, pep and
# config modules used below appear to come from the geepee package these examples are built on.
import numpy as np
import matplotlib.pyplot as plt


def plot(m, Xtrain, ytrain):
    xx = np.linspace(-0.5, 1.5, 100)[:, None]
    mean, var = m.predict_y(xx)
    mean = np.reshape(mean, (xx.shape[0], 1))
    var = np.reshape(var, (xx.shape[0], 1))
    if isinstance(m, aep.SDGPR):
        zu = m.sgp_layers[0].zu
    elif isinstance(m, vfe.SGPR_collapsed):
        zu = m.zu
    else:
        zu = m.sgp_layer.zu
    mean_u, var_u = m.predict_f(zu)
    plt.figure()
    plt.plot(Xtrain, ytrain, 'kx', mew=2)
    plt.plot(xx, mean, 'b', lw=2)
    plt.fill_between(
        xx[:, 0],
        mean[:, 0] - 2 * np.sqrt(var[:, 0]),
        mean[:, 0] + 2 * np.sqrt(var[:, 0]),
        color='blue', alpha=0.2)
    plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
    plt.xlim(-0.1, 1.1)
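The ±2 standard-deviation band drawn above is the pattern that recurs in every snippet on this page. A minimal, self-contained sketch of the same fill_between() call, using a made-up mean and variance in place of a trained model's predictions:

import numpy as np
import matplotlib.pyplot as plt

xx = np.linspace(-0.5, 1.5, 100)
mean = np.sin(3 * xx)                  # stand-in for a model's predictive mean
var = 0.05 + 0.1 * (xx - 0.5) ** 2     # stand-in for a model's predictive variance
plt.plot(xx, mean, 'b', lw=2)
plt.fill_between(xx,
                 mean - 2 * np.sqrt(var),
                 mean + 2 * np.sqrt(var),
                 color='blue', alpha=0.2)
plt.show()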
# The plot_roc/plot_pr helpers below come from utils.py in the
# Building-Machine-Learning-Systems-With-Python-Second-Edition project (PacktPublishing);
# they assume these imports and a CHART_DIR output directory defined in that file.
import os
from matplotlib import pylab
def plot_roc(auc_score, name, tpr, fpr, label=None):
    pylab.clf()
    pylab.figure(num=None, figsize=(5, 4))
    pylab.grid(True)
    pylab.plot([0, 1], [0, 1], 'k--')
    pylab.plot(fpr, tpr)
    pylab.fill_between(fpr, tpr, alpha=0.5)
    pylab.xlim([0.0, 1.0])
    pylab.ylim([0.0, 1.0])
    pylab.xlabel('False Positive Rate')
    pylab.ylabel('True Positive Rate')
    pylab.title('ROC curve (AUC = %0.2f) / %s' %
                (auc_score, label), verticalalignment="bottom")
    pylab.legend(loc="lower right")
    filename = name.replace(" ", "_")
    pylab.savefig(
        os.path.join(CHART_DIR, "roc_" + filename + ".png"), bbox_inches="tight")
def run_regression_1D():
    np.random.seed(42)
    print("create dataset ...")
    N = 200
    X = np.random.rand(N, 1)
    Y = np.sin(12 * X) + 0.5 * np.cos(25 * X) + np.random.randn(N, 1) * 0.2
    # plt.plot(X, Y, 'kx', mew=2)

    def plot(m):
        xx = np.linspace(-0.5, 1.5, 100)[:, None]
        # mean, var = m.predict_f(xx)
        samples, mf, vf = m.predict_f(xx, config.PROP_MC)
        zu = m.sgp_layers[0].zu
        mean_u, var_u = m.predict_f(zu)
        plt.figure()
        plt.plot(X, Y, 'kx', mew=2)
        # plt.plot(xx, mean, 'b', lw=2)
        # plt.fill_between(
        #     xx[:, 0],
        #     mean[:, 0] - 2 * np.sqrt(var[:, 0]),
        #     mean[:, 0] + 2 * np.sqrt(var[:, 0]),
        #     color='blue', alpha=0.2)
        # plot the propagated Monte Carlo samples (shape assumed [no_samples, N_test, 1])
        plt.plot(xx[:, 0], samples[:, :, 0].T, 'b', linewidth=0.5, alpha=0.3)
        plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
        plt.xlim(-0.1, 1.1)

    # inference
    print("create model and optimize ...")
    M = 20
    hidden_size = [2]
    model = aep.SDGPR(X, Y, M, hidden_size, lik='Gaussian')
    model.optimise(method='L-BFGS-B', alpha=1, maxiter=2000)
    plot(model)
    # plt.show()
    plt.savefig('/tmp/aep_dgpr_1D.pdf')
def run_regression_1D_stoc():
    np.random.seed(42)
    print("create dataset ...")
    N = 200
    X = np.random.rand(N, 1)
    Y = np.sin(12 * X) + 0.5 * np.cos(25 * X) + np.random.randn(N, 1) * 0.2
    # plt.plot(X, Y, 'kx', mew=2)

    def plot(m):
        xx = np.linspace(-0.5, 1.5, 100)[:, None]
        mean, var = m.predict_f(xx)
        zu = m.sgp_layers[0].zu
        mean_u, var_u = m.predict_f(zu)
        plt.figure()
        plt.plot(X, Y, 'kx', mew=2)
        plt.plot(xx, mean, 'b', lw=2)
        plt.fill_between(
            xx[:, 0],
            mean[:, 0] - 2 * np.sqrt(var[:, 0]),
            mean[:, 0] + 2 * np.sqrt(var[:, 0]),
            color='blue', alpha=0.2)
        plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
        plt.xlim(-0.1, 1.1)

    # inference
    print("create model and optimize ...")
    M = 20
    hidden_size = [2]
    model = aep.SDGPR(X, Y, M, hidden_size, lik='Gaussian')
    model.optimise(method='adam', alpha=1.0,
                   maxiter=50000, mb_size=M, adam_lr=0.001)
    plot(model)
    plt.show()
    plt.savefig('/tmp/aep_dgpr_1D_stoc.pdf')
def run_step_1D_collapsed():
    np.random.seed(42)
    print("create dataset ...")
    N = 200
    X = np.random.rand(N, 1) * 3 - 1.5
    Y = step(X)
    # plt.plot(X, Y, 'kx', mew=2)

    def plot(m):
        xx = np.linspace(-3, 3, 100)[:, None]
        mean, var = m.predict_f(xx, alpha)
        zu = m.zu
        mean_u, var_u = m.predict_f(zu)
        plt.figure()
        plt.plot(X, Y, 'kx', mew=2)
        plt.plot(xx, mean, 'b', lw=2)
        plt.fill_between(
            xx[:, 0],
            mean[:, 0] - 2 * np.sqrt(var),
            mean[:, 0] + 2 * np.sqrt(var),
            color='blue', alpha=0.2)
        plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
        # no_samples = 20
        # f_samples = m.sample_f(xx, no_samples)
        # for i in range(no_samples):
        #     plt.plot(xx, f_samples[:, :, i], linewidth=0.5, alpha=0.5)
        plt.xlim(-3, 3)

    # inference
    print("create model and optimize ...")
    M = 20
    alpha = 0.01
    model = vfe.SGPR_collapsed(X, Y, M)
    model.optimise(method='L-BFGS-B', alpha=alpha, maxiter=1000)
    plot(model)
    plt.show()
def run_regression_1D(nat_param=True):
    np.random.seed(42)
    print("create dataset ...")
    N = 200
    X = np.random.rand(N, 1)
    Y = np.sin(12 * X) + 0.5 * np.cos(25 * X) + np.random.randn(N, 1) * 0.2
    # plt.plot(X, Y, 'kx', mew=2)

    def plot(m):
        xx = np.linspace(-0.5, 1.5, 100)[:, None]
        mean, var = m.predict_f(xx)
        zu = m.sgp_layer.zu
        mean_u, var_u = m.predict_f(zu)
        plt.figure()
        plt.plot(X, Y, 'kx', mew=2)
        plt.plot(xx, mean, 'b', lw=2)
        plt.fill_between(
            xx[:, 0],
            mean[:, 0] - 2 * np.sqrt(var[:, 0]),
            mean[:, 0] + 2 * np.sqrt(var[:, 0]),
            color='blue', alpha=0.2)
        plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
        plt.xlim(-0.1, 1.1)

    # inference
    print("create model and optimize ...")
    M = 20
    model = vfe.SGPR(X, Y, M, lik='Gaussian', nat_param=nat_param)
    model.optimise(method='L-BFGS-B', maxiter=20000)
    # model.optimise(method='adam', adam_lr=0.05, maxiter=2000)
    plot(model)
    plt.show()
def run_step_1D():
    np.random.seed(42)
    print("create dataset ...")
    N = 200
    X = np.random.rand(N, 1) * 3 - 1.5
    Y = step(X)
    # plt.plot(X, Y, 'kx', mew=2)

    def plot(m):
        xx = np.linspace(-3, 3, 100)[:, None]
        mean, var = m.predict_f(xx)
        zu = m.sgp_layer.zu
        mean_u, var_u = m.predict_f(zu)
        plt.figure()
        plt.plot(X, Y, 'kx', mew=2)
        plt.plot(xx, mean, 'b', lw=2)
        plt.fill_between(
            xx[:, 0],
            mean[:, 0] - 2 * np.sqrt(var[:, 0]),
            mean[:, 0] + 2 * np.sqrt(var[:, 0]),
            color='blue', alpha=0.2)
        plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
        no_samples = 20
        xx = np.linspace(-3, 3, 500)[:, None]
        f_samples = m.sample_f(xx, no_samples)
        for i in range(no_samples):
            plt.plot(xx, f_samples[:, :, i], linewidth=0.5, alpha=0.5)
        plt.xlim(-3, 3)

    # inference
    print("create model and optimize ...")
    M = 20
    model = vfe.SGPR(X, Y, M, lik='Gaussian')
    model.optimise(method='L-BFGS-B', maxiter=2000)
    plot(model)
    plt.show()
def run_regression_1D_stoc():
    np.random.seed(42)
    print("create dataset ...")
    N = 200
    X = np.random.rand(N, 1)
    Y = np.sin(12 * X) + 0.5 * np.cos(25 * X) + np.random.randn(N, 1) * 0.2
    # plt.plot(X, Y, 'kx', mew=2)

    def plot(m):
        xx = np.linspace(-1.5, 2.5, 200)[:, None]
        mean, var = m.predict_f(xx)
        zu = m.sgp_layer.zu
        mean_u, var_u = m.predict_f(zu)
        plt.figure()
        plt.plot(X, Y, 'kx', mew=2)
        plt.plot(xx, mean, 'b', lw=2)
        plt.fill_between(
            xx[:, 0],
            mean[:, 0] - 2 * np.sqrt(var[:, 0]),
            mean[:, 0] + 2 * np.sqrt(var[:, 0]),
            color='blue', alpha=0.2)
        plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
        plt.xlim(-0.1, 1.1)

    # inference
    print("create model and optimize ...")
    M = 20
    model = vfe.SGPR(X, Y, M, lik='Gaussian')
    model.optimise(method='adam',
                   maxiter=100000, mb_size=N, adam_lr=0.001)
    # plot(model)
    # plt.show()
    # plt.savefig('/tmp/vfe_gpr_1D_stoc.pdf')
def run_regression_1D():
    np.random.seed(42)
    print("create dataset ...")
    N = 200
    X = np.random.rand(N, 1)
    Y = np.sin(12 * X) + 0.5 * np.cos(25 * X) + np.random.randn(N, 1) * 0.2
    # plt.plot(X, Y, 'kx', mew=2)

    def plot(m):
        xx = np.linspace(-0.5, 1.5, 100)[:, None]
        mean, var = m.predict_f(xx)
        zu = m.sgp_layer.zu
        mean_u, var_u = m.predict_f(zu)
        plt.figure()
        plt.plot(X, Y, 'kx', mew=2)
        plt.plot(xx, mean, 'b', lw=2)
        plt.fill_between(
            xx[:, 0],
            mean[:, 0] - 2 * np.sqrt(var[:, 0]),
            mean[:, 0] + 2 * np.sqrt(var[:, 0]),
            color='blue', alpha=0.2)
        plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
        plt.xlim(-0.1, 1.1)

    # inference
    print("create model and optimize ...")
    M = 20
    model = aep.SGPR(X, Y, M, lik='Gaussian')
    model.optimise(method='L-BFGS-B', alpha=0.1, maxiter=50000)
    plot(model)
    plt.show()
def run_regression_1D_stoc():
    np.random.seed(42)
    print("create dataset ...")
    N = 200
    X = np.random.rand(N, 1)
    Y = np.sin(12 * X) + 0.5 * np.cos(25 * X) + np.random.randn(N, 1) * 0.2
    # plt.plot(X, Y, 'kx', mew=2)

    def plot(m):
        xx = np.linspace(-0.5, 1.5, 100)[:, None]
        mean, var = m.predict_f(xx)
        zu = m.sgp_layer.zu
        mean_u, var_u = m.predict_f(zu)
        plt.figure()
        plt.plot(X, Y, 'kx', mew=2)
        plt.plot(xx, mean, 'b', lw=2)
        plt.fill_between(
            xx[:, 0],
            mean[:, 0] - 2 * np.sqrt(var[:, 0]),
            mean[:, 0] + 2 * np.sqrt(var[:, 0]),
            color='blue', alpha=0.2)
        plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
        plt.xlim(-0.1, 1.1)

    # inference
    print("create model and optimize ...")
    M = 20
    model = aep.SGPR(X, Y, M, lik='Gaussian')
    model.optimise(method='adam', alpha=0.1,
                   maxiter=100000, mb_size=M, adam_lr=0.001)
    plot(model)
    plt.show()
    plt.savefig('/tmp/aep_gpr_1D_stoc.pdf')
def run_step_1D():
    np.random.seed(42)
    print("create dataset ...")
    N = 200
    X = np.random.rand(N, 1) * 3 - 1.5
    Y = step(X)
    # plt.plot(X, Y, 'kx', mew=2)

    def plot(m):
        xx = np.linspace(-3, 3, 100)[:, None]
        mean, var = m.predict_y(xx)
        zu = m.sgp_layer.zu
        mean_u, var_u = m.predict_f(zu)
        plt.figure()
        plt.plot(X, Y, 'kx', mew=2)
        plt.plot(xx, mean, 'b', lw=2)
        plt.fill_between(
            xx[:, 0],
            mean[:, 0] - 2 * np.sqrt(var[:, 0]),
            mean[:, 0] + 2 * np.sqrt(var[:, 0]),
            color='blue', alpha=0.2)
        plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
        no_samples = 20
        xx = np.linspace(-3, 3, 500)[:, None]
        f_samples = m.sample_f(xx, no_samples)
        for i in range(no_samples):
            plt.plot(xx, f_samples[:, :, i], linewidth=0.5, alpha=0.5)
        plt.xlim(-3, 3)

    # inference
    print("create model and optimize ...")
    M = 20
    model = aep.SGPR(X, Y, M, lik='Gaussian')
    model.optimise(method='L-BFGS-B', alpha=0.9, maxiter=2000)
    plot(model)
    plt.savefig('/tmp/aep_gpr_step.pdf')
    # plt.show()
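The sample_f calls above draw posterior function samples from the fitted sparse GP. For reference, the same idea for an exact (non-sparse) GP regressor can be sketched with numpy alone; the squared-exponential kernel, lengthscale and noise level below are illustrative choices, not geepee's internals.

import numpy as np
import matplotlib.pyplot as plt

def rbf(a, b, ell=0.3, sf2=1.0):
    # squared-exponential kernel matrix between column vectors a and b
    d = a - b.T
    return sf2 * np.exp(-0.5 * (d / ell) ** 2)

rng = np.random.RandomState(0)
X = rng.rand(30, 1) * 3 - 1.5
Y = np.where(X > 0, 1.0, 0.0) + 0.05 * rng.randn(30, 1)   # noisy step data
xx = np.linspace(-3, 3, 200)[:, None]
sn2 = 0.05 ** 2
Kxx = rbf(X, X) + sn2 * np.eye(X.shape[0])
Ksx = rbf(xx, X)
Kss = rbf(xx, xx)
mean = np.dot(Ksx, np.linalg.solve(Kxx, Y))
cov = Kss - np.dot(Ksx, np.linalg.solve(Kxx, Ksx.T))
samples = rng.multivariate_normal(mean[:, 0], cov + 1e-8 * np.eye(xx.shape[0]), size=20)
plt.plot(xx[:, 0], samples.T, linewidth=0.5, alpha=0.5)
plt.plot(X, Y, 'kx', mew=2)
plt.show()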
def run_regression_1D_pep_training(stoc=False):
    np.random.seed(42)
    print("create dataset ...")
    N = 200
    X = np.random.rand(N, 1)
    Y = np.sin(12 * X) + 0.5 * np.cos(25 * X) + np.random.randn(N, 1) * 0.2
    # plt.plot(X, Y, 'kx', mew=2)

    def plot(m):
        xx = np.linspace(-0.5, 1.5, 100)[:, None]
        mean, var = m.predict_f(xx)
        zu = m.sgp_layer.zu
        mean_u, var_u = m.predict_f(zu)
        plt.figure()
        plt.plot(X, Y, 'kx', mew=2)
        plt.plot(xx, mean, 'b', lw=2)
        plt.fill_between(
            xx[:, 0],
            mean[:, 0] - 2 * np.sqrt(var[:, 0]),
            mean[:, 0] + 2 * np.sqrt(var[:, 0]),
            color='blue', alpha=0.2)
        plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
        plt.xlim(-0.1, 1.1)

    # inference
    print("create model and optimize ...")
    M = 20
    alpha = 0.1
    model_pep = pep.SGPR_rank_one(X, Y, M, lik='Gaussian')
    if stoc:
        mb_size = M
        fname = '/tmp/gpr_pep_reg_stoc.pdf'
        adam_lr = 0.005
    else:
        mb_size = N
        fname = '/tmp/gpr_pep_reg.pdf'
        adam_lr = 0.05
    model_pep.optimise(method='adam', mb_size=mb_size, adam_lr=adam_lr,
                       alpha=alpha, maxiter=2000)
    plot(model_pep)
    plt.savefig(fname)
def run_regression_1D():
    np.random.seed(42)
    print("create dataset ...")
    N = 200
    X = np.random.rand(N, 1)
    Y = np.sin(12 * X) + 0.5 * np.cos(25 * X) + np.random.randn(N, 1) * 0.2
    # plt.plot(X, Y, 'kx', mew=2)

    def plot(m):
        xx = np.linspace(-0.5, 1.5, 100)[:, None]
        mean, var = m.predict_f(xx)
        zu = m.sgp_layers[0].zu
        mean_u, var_u = m.predict_f(zu)
        plt.figure()
        plt.plot(X, Y, 'kx', mew=2)
        plt.plot(xx, mean, 'b', lw=2)
        plt.fill_between(
            xx[:, 0],
            mean[:, 0] - 2 * np.sqrt(var[:, 0]),
            mean[:, 0] + 2 * np.sqrt(var[:, 0]),
            color='blue', alpha=0.2)
        plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
        plt.xlim(-0.1, 1.1)

    # inference
    print("create model and optimize ...")
    M = 20
    hidden_size = [2]
    model = aep.SDGPR_H(X, Y, M, hidden_size, lik='Gaussian')
    model.optimise(method='L-BFGS-B', alpha=0.5, maxiter=2000)
    plot(model)
    plt.show()
    plt.savefig('/tmp/aep_dgpr_1D.pdf')
def plotlinfit(xdata, ydata, a, b, erra, errb, cov, linestyle='', conf=0.683,
               confcolor='gray', xplot=None, front=False, **args):
    """
    Wrapper that takes the output of a linear regression method (for example
    bayeslin.pro, the Bayesian linear regression method of Kelly (2007)) and
    plots the best fit together with its confidence band.

    Input: X, Y, slope (a), intercept (b), their uncertainties (erra, errb)
    and the covariance cov(a, b).

    Assumes the plot window was initialized before calling this method.

    Usage:
    >>> nemmen.plotlinfit(x, y, a, b, erra, errb, covab, linestyle='k', confcolor='LightGrey')

    Explanation of some arguments:
    - xplot: if provided, the confidence band is computed at the X-values given in xplot
    - front: if True, the confidence band is drawn in front of the data points;
      otherwise it is drawn behind them
    """
    # Plot best fit
    if xplot is None:
        x = numpy.linspace(xdata.min(), xdata.max(), 100)
    else:
        x = xplot
    pylab.plot(x, a * x + b, linestyle, **args)
    fitm = numpy.array([a, b])  # array with best-fit parameters
    covm = numpy.array([(erra ** 2, cov), (cov, errb ** 2)])  # covariance matrix
    def func(x): return x[1] * x[0] + x[2]
    # Plot confidence band
    lcb, ucb, xcb = confbandnl(xdata, ydata, func, fitm, covm, 2, conf, x)
    if front:
        zorder = 10
    else:
        zorder = None
    pylab.fill_between(xcb, lcb, ucb, alpha=0.3, facecolor=confcolor, zorder=zorder)
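The band returned by confbandnl propagates the parameter covariance through the model. For a straight line y = a*x + b this can be written out directly, since Var[a*x + b] = x^2 Var[a] + 2 x Cov(a, b) + Var[b]; a self-contained sketch with made-up fit results:

import numpy
from matplotlib import pylab
from scipy.stats import norm

# hypothetical fit results: slope a, intercept b, their errors and covariance
a, b, erra, errb, cov = 2.0, 1.0, 0.1, 0.3, -0.02
x = numpy.linspace(0, 10, 100)
yfit = a * x + b
# variance of the predicted line at each x, from the parameter covariance
vary = x ** 2 * erra ** 2 + 2 * x * cov + errb ** 2
z = norm.ppf(0.5 + 0.683 / 2)  # roughly 1 sigma for a 68.3% band
pylab.plot(x, yfit, 'k')
pylab.fill_between(x, yfit - z * numpy.sqrt(vary), yfit + z * numpy.sqrt(vary),
                   alpha=0.3, facecolor='gray')
pylab.show()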
def plot_pr(auc_score, name, phase, precision, recall, label=None):
    pylab.clf()
    pylab.figure(num=None, figsize=(5, 4))
    pylab.grid(True)
    pylab.fill_between(recall, precision, alpha=0.5)
    pylab.plot(recall, precision, lw=1)
    pylab.xlim([0.0, 1.0])
    pylab.ylim([0.0, 1.0])
    pylab.xlabel('Recall')
    pylab.ylabel('Precision')
    pylab.title('P/R curve (AUC=%0.2f) / %s' % (auc_score, label))
    filename = name.replace(" ", "_")
    pylab.savefig(os.path.join(CHART_DIR, "pr_%s_%s.png" %
                               (filename, phase)), bbox_inches="tight")
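A possible usage sketch for plot_pr: the labels and scores are synthetic, scikit-learn's precision_recall_curve/auc are assumed to be available, and CHART_DIR must exist for the savefig call to succeed.

import numpy as np
from sklearn.metrics import precision_recall_curve, auc

rng = np.random.RandomState(0)
y_true = rng.randint(0, 2, size=500)          # hypothetical binary labels
y_score = y_true * 0.5 + rng.rand(500)        # hypothetical classifier scores
precision, recall, _ = precision_recall_curve(y_true, y_score)
auc_score = auc(recall, precision)            # recall is monotonically decreasing
plot_pr(auc_score, "toy classifier", "test", precision, recall, label="toy classifier")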
def plot_roc(auc_score, name, fpr, tpr):
    pylab.figure(num=None, figsize=(6, 5))
    pylab.plot([0, 1], [0, 1], 'k--')
    pylab.xlim([0.0, 1.0])
    pylab.ylim([0.0, 1.0])
    pylab.xlabel('False Positive Rate')
    pylab.ylabel('True Positive Rate')
    pylab.title('Receiver operating characteristic (AUC=%0.2f)\n%s' % (
        auc_score, name))
    pylab.legend(loc="lower right")
    pylab.grid(True, linestyle='-', color='0.75')
    # shade the area under the ROC curve (x = FPR, y = TPR)
    pylab.fill_between(fpr, tpr, alpha=0.5)
    pylab.plot(fpr, tpr, lw=1)
    pylab.savefig(
        os.path.join(CHART_DIR, "roc_" + name.replace(" ", "_") + ".png"))
def plot_pr(auc_score, name, precision, recall, label=None):
    pylab.figure(num=None, figsize=(6, 5))
    pylab.xlim([0.0, 1.0])
    pylab.ylim([0.0, 1.0])
    pylab.xlabel('Recall')
    pylab.ylabel('Precision')
    pylab.title('P/R (AUC=%0.2f) / %s' % (auc_score, label))
    pylab.fill_between(recall, precision, alpha=0.5)
    pylab.grid(True, linestyle='-', color='0.75')
    pylab.plot(recall, precision, lw=1)
    filename = name.replace(" ", "_")
    pylab.savefig(os.path.join(CHART_DIR, "pr_" + filename + ".png"))
def plot_pr(auc_score, name, precision, recall, label=None):
    pylab.clf()
    pylab.figure(num=None, figsize=(5, 4))
    pylab.grid(True)
    pylab.fill_between(recall, precision, alpha=0.5)
    pylab.plot(recall, precision, lw=1)
    pylab.xlim([0.0, 1.0])
    pylab.ylim([0.0, 1.0])
    pylab.xlabel('Recall')
    pylab.ylabel('Precision')
    pylab.title('P/R curve (AUC = %0.2f) / %s' % (auc_score, label))
    filename = name.replace(" ", "_")
    pylab.savefig(
        os.path.join(CHART_DIR, "pr_" + filename + ".png"), bbox_inches="tight")
def run_regression_1D():
    np.random.seed(42)
    print("create dataset ...")
    N = 50
    rng = np.random.RandomState(42)
    X = np.sort(2 * rng.rand(N, 1) - 1, axis=0)
    Y = np.array([np.pi * np.sin(10 * X).ravel(),
                  np.pi * np.cos(10 * X).ravel()]).T
    Y += (0.5 - rng.rand(*Y.shape))
    Y = Y / np.std(Y, axis=0)

    def plot(model, alpha, fname):
        xx = np.linspace(-1.2, 1.2, 200)[:, None]
        if isinstance(model, IndepSGPR):
            mf, vf = model.predict_f(xx, alpha)
        else:
            # mf, vf = model.predict_f(xx, alpha, use_mean_only=False)
            mf, vf = model.predict_f(xx, alpha, use_mean_only=True)
        colors = ['r', 'b']
        plt.figure()
        for i in range(model.Dout):
            plt.subplot(model.Dout, 1, i + 1)
            plt.plot(X, Y[:, i], 'x', color=colors[i], mew=2)
            zu = model.models[i].zu
            mean_u, var_u = model.models[i].predict_f(zu, alpha)
            plt.plot(xx, mf[:, i], '-', color=colors[i], lw=2)
            plt.fill_between(
                xx[:, 0],
                mf[:, i] - 2 * np.sqrt(vf[:, i]),
                mf[:, i] + 2 * np.sqrt(vf[:, i]),
                color=colors[i], alpha=0.3)
            # plt.errorbar(zu[:, 0], mean_u, yerr=2*np.sqrt(var_u), fmt='ro')
            plt.xlim(-1.2, 1.2)
        plt.savefig(fname)

    # inference
    print("create independent output model and optimize ...")
    M = N
    alpha = 0.01
    indep_model = IndepSGPR(X, Y, M)
    indep_model.train(alpha=alpha)
    plot(indep_model, alpha, '/tmp/reg_indep_multioutput.pdf')

    print("create correlated output model and optimize ...")
    M = N
    ar_model = AutoSGPR(X, Y, M)
    ar_model.train(alpha=alpha)
    plot(ar_model, alpha, '/tmp/reg_autoreg_multioutput.pdf')
def run_step_1D():
    np.random.seed(42)

    def step(x):
        y = x.copy()
        y[y < 0.0] = 0.0
        y[y > 0.0] = 1.0
        return y + 0.02 * np.random.randn(x.shape[0], 1)

    print("create dataset ...")
    N = 100
    X = np.random.rand(N, 1) * 3 - 1.5
    Y = step(X) - 0.5
    # plt.plot(X, Y, 'kx', mew=2)

    def plot(m):
        xx = np.linspace(-3, 3, 100)[:, None]
        mean, var = m.predict_f(xx)
        zu = m.sgp_layers[0].zu
        mean_u, var_u = m.predict_f(zu)
        plt.figure()
        plt.plot(X, Y, 'kx', mew=2)
        plt.plot(xx, mean, 'b', lw=2)
        plt.fill_between(
            xx[:, 0],
            mean[:, 0] - 2 * np.sqrt(var[:, 0]),
            mean[:, 0] + 2 * np.sqrt(var[:, 0]),
            color='blue', alpha=0.2)
        plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
        no_samples = 20
        xx = np.linspace(-3, 3, 500)[:, None]
        f_samples = m.sample_f(xx, no_samples)
        for i in range(no_samples):
            plt.plot(xx, f_samples[:, :, i], linewidth=0.5, alpha=0.5)
        plt.xlim(-3, 3)

    # inference
    print("create model and optimize ...")
    M = 20
    hidden_size = [2]
    model = aep.SDGPR(X, Y, M, hidden_size, lik='Gaussian')
    # model.optimise(method='L-BFGS-B', alpha=1, maxiter=1000)
    model.optimise(method='adam', adam_lr=0.05, alpha=1, maxiter=2000)
    plot(model)
    plt.show()
def plot_posterior_linear(params_fname, fig_fname, control=False, M=20):
    # load dataset
    data = np.loadtxt('./sandbox/hh_data.txt')
    # use the voltage and potassium current
    data = data / np.std(data, axis=0)
    y = data[:, :4]
    xc = data[:, [-1]]
    # init hypers
    Dlatent = 2
    Dobs = y.shape[1]
    T = y.shape[0]
    if control:
        x_control = xc
        no_panes = 5
    else:
        x_control = None
        no_panes = 4
    model_aep = aep.SGPSSM_Linear(y, Dlatent, M,
                                  lik='Gaussian', prior_mean=0, prior_var=1000,
                                  x_control=x_control)
    model_aep.load_model(params_fname)
    my, vy, vyn = model_aep.get_posterior_y()
    vy_diag = np.diagonal(vy, axis1=1, axis2=2)
    vyn_diag = np.diagonal(vyn, axis1=1, axis2=2)
    cs = ['k', 'r', 'b', 'g']
    labels = ['V', 'm', 'n', 'h']
    plt.figure()
    t = np.arange(T)
    for i in range(4):
        yi = y[:, i]
        mi = my[:, i]
        vi = vy_diag[:, i]
        vin = vyn_diag[:, i]
        plt.subplot(no_panes, 1, i + 1)
        plt.fill_between(t, mi + 2 * np.sqrt(vi), mi - 2 *
                         np.sqrt(vi), color=cs[i], alpha=0.4)
        plt.plot(t, mi, '-', color=cs[i])
        plt.plot(t, yi, '--', color=cs[i])
        plt.ylabel(labels[i])
        plt.xticks([])
        plt.yticks([])
    if control:
        plt.subplot(no_panes, 1, no_panes)
        plt.plot(t, x_control, '-', color='m')
        plt.ylabel('I')
        plt.yticks([])
    plt.xlabel('t')
    plt.savefig(fig_fname)
    if control:
        plot_model_with_control(model_aep, '', '_linear_with_control')
    else:
        plot_model_no_control(model_aep, '', '_linear_no_control')
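get_posterior_y here is taken to return, per time step, a mean vector and a full covariance over the observed dimensions, so np.diagonal(..., axis1=1, axis2=2) extracts the per-dimension variances used for the shaded bands. The same pattern on made-up means and covariances:

import numpy as np
import matplotlib.pyplot as plt

T, D = 100, 2
t = np.arange(T)
my = np.column_stack([np.sin(0.1 * t), np.cos(0.1 * t)])   # hypothetical posterior means, shape [T, D]
vy = np.tile(0.05 * np.eye(D), (T, 1, 1))                  # hypothetical posterior covariances, shape [T, D, D]
vy_diag = np.diagonal(vy, axis1=1, axis2=2)                # per-dimension variances, shape [T, D]
plt.figure()
for i in range(D):
    plt.subplot(D, 1, i + 1)
    plt.fill_between(t, my[:, i] - 2 * np.sqrt(vy_diag[:, i]),
                     my[:, i] + 2 * np.sqrt(vy_diag[:, i]), alpha=0.4)
    plt.plot(t, my[:, i], '-')
plt.show()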
def plot_prediction_gp(params_fname, fig_fname, M=20):
    # load dataset
    data = np.loadtxt('./sandbox/hh_data.txt')
    # use the voltage and potassium current
    data = data / np.std(data, axis=0)
    y = data[:, :4]
    xc = data[:, [-1]]
    # init hypers
    Dlatent = 2
    Dobs = y.shape[1]
    T = y.shape[0]
    x_control = xc
    # x_control_test = np.flipud(x_control)
    x_control_test = x_control * 1.5
    no_panes = 5
    model_aep = aep.SGPSSM_GP(y, Dlatent, M,
                              lik='Gaussian', prior_mean=0, prior_var=1000,
                              x_control=x_control)
    model_aep.load_model(params_fname)
    print('ls ', np.exp(model_aep.dyn_layer.ls))
    my, vy, vyn = model_aep.get_posterior_y()
    mxp, vxp, myp, vyp, vynp = model_aep.predict_forward(T, x_control_test)
    cs = ['k', 'r', 'b', 'g']
    labels = ['V', 'm', 'n', 'h']
    plt.figure()
    t = np.arange(T)
    for i in range(4):
        yi = y[:, i]
        mi = my[:, i]
        vi = vy[:, i]
        vin = vyn[:, i]
        mip = myp[:, i]
        vip = vyp[:, i]
        vinp = vynp[:, i]
        plt.subplot(5, 1, i + 1)
        plt.fill_between(t, mi + 2 * np.sqrt(vi), mi - 2 *
                         np.sqrt(vi), color=cs[i], alpha=0.4)
        plt.plot(t, mi, '-', color=cs[i])
        plt.fill_between(np.arange(T, 2 * T), mip + 2 * np.sqrt(vip),
                         mip - 2 * np.sqrt(vip), color=cs[i], alpha=0.4)
        plt.plot(np.arange(T, 2 * T), mip, '-', color=cs[i])
        plt.plot(t, yi, '--', color=cs[i])
        plt.axvline(x=T, color='k', linewidth=2)
        plt.ylabel(labels[i])
        plt.xticks([])
        plt.yticks([])
    plt.subplot(no_panes, 1, no_panes)
    plt.plot(t, x_control, '-', color='m')
    plt.plot(np.arange(T, 2 * T), x_control_test, '-', color='m')
    plt.axvline(x=T, color='k', linewidth=2)
    plt.ylabel('I')
    plt.yticks([])
    plt.xlabel('t')
    plt.savefig(fig_fname)
def run_regression_1D():
    np.random.seed(42)
    print("create dataset ...")
    N = 200
    X = np.random.rand(N, 1)
    Y = np.sin(12 * X) + 0.5 * np.cos(25 * X) + np.random.randn(N, 1) * 0.2
    # plt.plot(X, Y, 'kx', mew=2)

    def plot(m):
        xx = np.linspace(-0.5, 1.5, 100)[:, None]
        mean, var = m.predict_f(xx)
        zu = m.sgp_layer.zu
        mean_u, var_u = m.predict_f(zu)
        plt.figure()
        plt.plot(X, Y, 'kx', mew=2)
        plt.plot(xx, mean, 'b', lw=2)
        plt.fill_between(
            xx[:, 0],
            mean[:, 0] - 2 * np.sqrt(var[:, 0]),
            mean[:, 0] + 2 * np.sqrt(var[:, 0]),
            color='blue', alpha=0.2)
        plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
        plt.xlim(-0.1, 1.1)

    # inference
    print("create model and optimize ...")
    M = 20
    alpha = 0.1
    model_aep = aep.SGPR(X, Y, M, lik='Gaussian')
    model_aep.optimise(method='L-BFGS-B', alpha=alpha, maxiter=2000)
    plot(model_aep)
    plt.savefig('/tmp/gpr_aep_reg.pdf')

    start_time = time.time()
    model = pep.SGPR(X, Y, M, lik='Gaussian')
    model.update_hypers(model_aep.get_hypers())
    # model.update_hypers(model.init_hypers())
    model.inference(alpha=alpha, no_epochs=10)
    end_time = time.time()
    print("sequential updates: %.4f" % (end_time - start_time))
    plot(model)
    plt.savefig('/tmp/gpr_pep_reg_seq.pdf')

    start_time = time.time()
    model = pep.SGPR(X, Y, M, lik='Gaussian')
    model.update_hypers(model_aep.get_hypers())
    # model.update_hypers(model.init_hypers())
    model.inference(alpha=alpha, no_epochs=10, parallel=True)
    end_time = time.time()
    print("parallel updates: %.4f" % (end_time - start_time))
    plot(model)
    plt.savefig('/tmp/gpr_pep_reg_par.pdf')
    # plt.show()
def run_step_1D():
    np.random.seed(42)

    def step(x):
        y = x.copy()
        y[y < 0.0] = 0.0
        y[y > 0.0] = 1.0
        return y + 0.05 * np.random.randn(x.shape[0], 1)

    print("create dataset ...")
    N = 100
    X = np.random.rand(N, 1) * 3 - 1.5
    Y = step(X)
    # plt.plot(X, Y, 'kx', mew=2)

    def plot(m):
        xx = np.linspace(-3, 3, 100)[:, None]
        mean, var = m.predict_f(xx)
        zu = m.sgp_layers[0].zu
        mean_u, var_u = m.predict_f(zu)
        plt.figure()
        plt.plot(X, Y, 'kx', mew=2)
        plt.plot(xx, mean, 'b', lw=2)
        plt.fill_between(
            xx[:, 0],
            mean[:, 0] - 2 * np.sqrt(var[:, 0]),
            mean[:, 0] + 2 * np.sqrt(var[:, 0]),
            color='blue', alpha=0.2)
        plt.errorbar(zu, mean_u, yerr=2 * np.sqrt(var_u), fmt='ro')
        no_samples = 20
        xx = np.linspace(-3, 3, 500)[:, None]
        f_samples = m.sample_f(xx, no_samples)
        for i in range(no_samples):
            plt.plot(xx, f_samples[:, :, i], linewidth=0.5, alpha=0.5)
        plt.xlim(-3, 3)

    # inference
    print("create model and optimize ...")
    M = 20
    hidden_size = [3, 2]
    model = aep.SDGPR_H(X, Y, M, hidden_size, lik='Gaussian')
    model.optimise(method='L-BFGS-B', alpha=1, maxiter=1000)
    plot(model)
    plt.show()
def fitconf(xdata, ydata, errx, erry, covxy, nboot=1000, bces='ort', linestyle='',
            conf=0.683, confcolor='gray', xplot=None, front=False, **args):
    """
    Wrapper that, given the input data, performs the BCES fit, gets the
    orthogonal parameters, and plots the best-fit line together with its
    confidence band (generated using analytical methods). These commands are
    grouped in one method because they are used together very frequently.

    Assumes the plot window was initialized before calling this method.

    Usage:
    >>> a1, b1, erra1, errb1, cov1 = nemmen.fitconf(x[i], y[i], errx[i], erry[i], covxy[i], nboot, bces, linestyle='k', confcolor='LightGrey')

    Explanation of some arguments:
    - xplot: if provided, the confidence band is computed at the X-values given in xplot
    - front: if True, the confidence band is drawn in front of the data points;
      otherwise it is drawn behind them
    """
    # Select the desired BCES method
    i = whichbces(bces)
    # Perform the BCES fit
    a, b, erra, errb, cov = bcesp(xdata, errx, ydata, erry, covxy, nboot)
    # Plot best fit
    if xplot is None:
        x = numpy.linspace(xdata.min(), xdata.max(), 100)
    else:
        x = xplot
    pylab.plot(x, a[i] * x + b[i], linestyle, **args)
    fitm = numpy.array([a[i], b[i]])  # array with best-fit parameters
    covm = numpy.array([(erra[i] ** 2, cov[i]), (cov[i], errb[i] ** 2)])  # covariance matrix
    def func(x): return x[1] * x[0] + x[2]
    # Plot confidence band
    lcb, ucb, xcb = confbandnl(xdata, ydata, func, fitm, covm, 2, conf, x)
    if front:
        zorder = 10
    else:
        zorder = None
    pylab.fill_between(xcb, lcb, ucb, alpha=0.3, facecolor=confcolor, zorder=zorder)
    return a, b, erra, errb, cov
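fitconf builds its band analytically from the bootstrapped BCES parameters. A generic alternative, sketched below with plain ordinary least squares rather than the BCES estimator, is to bootstrap the fit itself and take a percentile envelope of the fitted lines:

import numpy
from matplotlib import pylab

rng = numpy.random.RandomState(0)
xdata = numpy.linspace(0, 10, 50)
ydata = 2.0 * xdata + 1.0 + rng.randn(50)                 # hypothetical data
x = numpy.linspace(xdata.min(), xdata.max(), 100)
lines = []
for _ in range(1000):
    idx = rng.randint(0, xdata.size, xdata.size)          # resample pairs with replacement
    a, b = numpy.polyfit(xdata[idx], ydata[idx], 1)       # ordinary least squares, degree 1
    lines.append(a * x + b)
lines = numpy.array(lines)
lcb, ucb = numpy.percentile(lines, [15.85, 84.15], axis=0)  # central 68.3% envelope
pylab.plot(xdata, ydata, 'o')
pylab.plot(x, numpy.median(lines, axis=0), 'k')
pylab.fill_between(x, lcb, ucb, alpha=0.3, facecolor='gray')
pylab.show()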