async def write_h(image_filename, data):
    try:
        monthly_ret = await aggregate_returns(returns=data, convert_to='monthly')
        monthly_ret = monthly_ret.unstack()
        monthly_ret = round(monthly_ret, 3)
        monthly_ret.rename(
            columns={1: 'Jan', 2: 'Feb', 3: 'Mar', 4: 'Apr',
                     5: 'May', 6: 'Jun', 7: 'Jul', 8: 'Aug',
                     9: 'Sep', 10: 'Oct', 11: 'Nov', 12: 'Dec'},
            inplace=True
        )
        ax = plt.gca()
        sns.heatmap(
            monthly_ret.fillna(0),  # * 100.0,
            annot=True,
            fmt="0.1f",
            annot_kws={"size": 8},
            alpha=1.0,
            center=0.0,
            cbar=False,
            cmap=cm.RdYlGn,
            ax=ax)
        ax.set_title('Returns heatmap, %', fontweight='bold')
        plt.savefig(image_filename)
        plt.close()
        if settings.SHOW_DEBUG:
            print(colored.green("Wrote heatmap image for {}\n".format(image_filename)))
    except Exception as err:
        print(colored.red("At write_heatmap {}".format(err)))
async def save_heatmap(data, info):
    try:
        image_filename = filename_constructor(info=info, folder="heatmap")
        # Regenerate the heatmap if it does not exist yet or is older than 30 days.
        # `or` short-circuits, so getmtime() is only called when the file exists.
        if (not isfile(image_filename)) or \
                (datetime.fromtimestamp(getmtime(image_filename)) <
                 (datetime.now() - timedelta(days=30))):
            await write_h(image_filename=image_filename, data=data)
    except Exception as err:
        print(colored.red("At save_heatmap {}".format(err)))
def generate_monthly_heatmaps(loop):
    brokers = Brokers.objects.all()
    path_to = join(settings.DATA_PATH, "performance")
    filenames = multi_filenames(path_to_history=path_to)
    loop.run_until_complete(gather(
        *[make_heat_img(path_to=path_to, filename=filename) for filename in filenames],
        return_exceptions=True))
    # AI50 index heatmap
    loop.run_until_complete(gather(
        *[qindex_heatmap(broker=broker.slug) for broker in brokers],
        return_exceptions=True))
def compute(self, config):
    INPUT_ITR = self.iterator_batch(self._iterator_mean_cluster_vectors())
    Z = self.cluster_affinity_states(INPUT_ITR, size=self.cluster_n)
    print("Initial affinity grouping", Z.shape)
    # print self.vocab_n, self.cluster_n
    INPUT_ITR = self.iterator_batch(Z)
    Z2 = self.cluster_affinity_states(INPUT_ITR, size=len(Z))
    print("Final affinity size", len(Z2))
    self.save(config, Z2)
    '''
    import seaborn as sns
    plt = sns.plt
    DZ2 = cdist(Z2, Z2, metric='cosine')
    sns.heatmap(DZ2, xticklabels=False, yticklabels=False, linewidths=0)
    sns.plt.figure()
    # plt.show()
    DZ = cdist(Z, Z, metric='cosine')
    sns.heatmap(DZ, xticklabels=False, yticklabels=False, linewidths=0)
    # sns.plt.figure()
    sns.plt.show()
    '''
    self.h5.close()
def plot_heatmaps(data, labels, alpha, mis, column_label, cont, topk=20, prefix='', focus=''):
    cmap = sns.cubehelix_palette(as_cmap=True, light=.9)
    m, nv = mis.shape
    for j in range(m):
        inds = np.where(np.logical_and(alpha[j] > 0, mis[j] > 0.))[0]
        inds = inds[np.argsort(- alpha[j, inds] * mis[j, inds])][:topk]
        if focus in column_label:
            ifocus = column_label.index(focus)
            if ifocus not in inds:
                inds = np.insert(inds, 0, ifocus)
        if len(inds) >= 2:
            plt.clf()
            order = np.argsort(cont[:, j])
            subdata = data[:, inds][order].T
            subdata -= np.nanmean(subdata, axis=1, keepdims=True)
            subdata /= np.nanstd(subdata, axis=1, keepdims=True)
            columns = [column_label[i] for i in inds]
            sns.heatmap(subdata, vmin=-3, vmax=3, cmap=cmap, yticklabels=columns,
                        xticklabels=False, mask=np.isnan(subdata))
            filename = '{}/heatmaps/group_num={}.png'.format(prefix, j)
            if not os.path.exists(os.path.dirname(filename)):
                os.makedirs(os.path.dirname(filename))
            plt.title("Latent factor {}".format(j))
            plt.savefig(filename, bbox_inches='tight')
            plt.close('all')
            # plot_rels(data[:, inds], list(map(lambda q: column_label[q], inds)), colors=cont[:, j],
            #           outfile=prefix + '/relationships/group_num=' + str(j), latent=labels[:, j], alpha=0.1)
def plot_confusion_matrix(targets, predictions, target_names,
                          title='Confusion matrix', cmap="Blues"):
    """Plot a row-normalized confusion matrix (percentages)."""
    cm = confusion_matrix(targets, predictions)
    cm = 100 * cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
    df = pd.DataFrame(data=cm, columns=target_names, index=target_names)
    g = sns.heatmap(df, annot=True, fmt=".1f", linewidths=.5, vmin=0, vmax=100,
                    cmap=cmap)
    g.set_title(title)
    g.set_ylabel('True label')
    g.set_xlabel('Predicted label')
    return g
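# A minimal usage sketch for plot_confusion_matrix above, assuming sklearn's
# confusion_matrix, numpy (np), pandas (pd), and seaborn (sns) are imported as
# in the function; the labels and class names below are illustrative only.
targets = [0, 0, 1, 1, 2, 2, 2]
predictions = [0, 1, 1, 1, 2, 0, 2]
g = plot_confusion_matrix(targets, predictions,
                          target_names=['setosa', 'versicolor', 'virginica'])
g.figure.savefig('confusion_matrix.png', bbox_inches='tight')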
def plot_activity(series, savename='activity.png'):
    """Plot the reviewers' activity as a calendar-style heatmap."""
    # Fill the time series up to the next Saturday (end of the week)
    series = fill_week(series)
    # Fill or truncate the time series to suit the plot
    number_of_days = 371
    if series.shape[0] > number_of_days:
        # Truncate to 371 days
        series = series[-number_of_days:]
    elif series.shape[0] < number_of_days:
        # Fill remaining values with zero
        series = fill_year(series)
    assert series.shape[0] == number_of_days
    # Obtain the month label for each week of the year
    months = series.index.map(lambda x: x.strftime('%b')).tolist()
    n_weekdays = 7
    # Split in weeks
    months = months[::n_weekdays]
    # Blank out repeated month labels
    current_month = ''
    for n, month in enumerate(months):
        if month == current_month:
            months[n] = ''
        else:
            current_month = month
    # Plot
    fig, ax = plt.subplots()
    sns.heatmap(series.values.reshape(-1, n_weekdays).T, ax=ax,
                cmap='YlGn', cbar=False, linewidths=1, square=True,
                xticklabels=months,
                yticklabels=['', 'M', '', 'W', '', 'F', ''])
    ax.xaxis.tick_top()
    plt.savefig(savename, bbox_inches='tight')
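# A hypothetical usage sketch for plot_activity, assuming the fill_week and
# fill_year helpers used above are available in the same module; the daily
# review counts below are synthetic.
import numpy as np
import pandas as pd

dates = pd.date_range(end=pd.Timestamp.today().normalize(), periods=200, freq='D')
activity = pd.Series(np.random.poisson(2, size=len(dates)), index=dates)
plot_activity(activity, savename='reviewer_activity.png')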
def visualize_sensors(state):
    # Clear the current figure (plt is matplotlib.pyplot; the sns.plt alias was
    # removed from recent seaborn releases).
    plt.clf()
    # Make a 2d list from the first row of the state.
    cols = [state[0]]
    # Plot it.
    sns.heatmap(data=cols, cmap="Blues_r", yticklabels=False)
    # Draw it.
    plt.draw()
    # Add a pause so the figure window can refresh.
    plt.pause(0.05)
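# A minimal usage sketch for visualize_sensors: `state` is assumed to be a 2-D
# array whose first row holds the current sensor readings (synthetic here).
import numpy as np
import matplotlib.pyplot as plt

plt.ion()                       # interactive mode so draw()/pause() refresh the window
state = np.random.rand(1, 16)   # one row of 16 sensor readings
visualize_sensors(state)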
def plot(returns,
         title="Monthly Returns (%)\n",
         title_color="black",
         title_size=14,
         annot_size=10,
         figsize=None,
         cmap='RdYlGn',
         cbar=True,
         square=False,
         is_prices=False,
         eoy=False):
    returns = get(returns, eoy=eoy, is_prices=is_prices)
    returns *= 100
    if figsize is None:
        size = list(plt.gcf().get_size_inches())
        figsize = (size[0], size[0] // 2)
    plt.close()
    fig, ax = plt.subplots(figsize=figsize)
    ax = sns.heatmap(returns, ax=ax, annot=True, center=0,
                     annot_kws={"size": annot_size},
                     fmt="0.2f", linewidths=0.5,
                     square=square, cbar=cbar, cmap=cmap)
    ax.set_title(title, fontsize=title_size,
                 color=title_color, fontweight="bold")
    fig.subplots_adjust(hspace=0)
    plt.yticks(rotation=0)
    plt.show()
    plt.close()
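# A hypothetical usage sketch for plot(), assuming the get() helper used above
# pivots a daily return series into a year-by-month table; the synthetic daily
# returns below are illustrative only.
import numpy as np
import pandas as pd

idx = pd.date_range('2020-01-01', '2021-12-31', freq='D')
daily_returns = pd.Series(np.random.normal(0.0005, 0.01, size=len(idx)), index=idx)
plot(daily_returns, cbar=False, square=True)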
def test():
    saver.restore(sess, FLAGS.save_dir + '/model.ckpt')
    batch_x, _ = mnist.test.next_batch(batch_size)
    fig = plt.figure('original')
    plt.gray()
    plt.axis('off')
    plt.imshow(batchmat_to_tileimg(batch_x, (height, width), (10, 10)))
    fig.savefig(FLAGS.save_dir + '/original.png')
    fig = plt.figure('reconstructed')
    plt.gray()
    plt.axis('off')
    p_recon = sess.run(p, {x: batch_x})
    plt.imshow(batchmat_to_tileimg(p_recon, (height, width), (10, 10)))
    fig.savefig(FLAGS.save_dir + '/reconstructed.png')
    batch_w = np.zeros((n_fac*n_fac, n_fac))
    for i in range(n_fac):
        batch_w[i*n_fac:(i+1)*n_fac, i] = 1.0
    batch_z = np.random.normal(size=(n_fac*n_fac, n_lat))
    p_gen = sess.run(p, {w: batch_w, z: batch_z})
    I_gen = batchmat_to_tileimg(p_gen, (height, width), (n_fac, n_fac))
    fig = plt.figure('generated')
    plt.gray()
    plt.axis('off')
    plt.imshow(I_gen)
    fig.savefig(FLAGS.save_dir + '/generated.png')
    fig = plt.figure('factor activation heatmap')
    hist = np.zeros((10, n_fac))
    # Count, per digit class, how often each factor is active over one pass
    # through the test set.
    for _ in range(mnist.test.num_examples // batch_size):
        batch_x, batch_y = mnist.test.next_batch(batch_size)
        batch_w = sess.run(w, {x: batch_x})
        for j in range(batch_size):
            hist[batch_y[j], batch_w[j] > 0] += 1
    sns.heatmap(hist)
    fig.savefig(FLAGS.save_dir + '/feature_activation.png')
    plt.show()
def test():
    saver.restore(sess, FLAGS.save_dir + '/model.ckpt')
    batch_x = test_x[0:100]
    fig = plt.figure('original')
    plt.gray()
    plt.axis('off')
    plt.imshow(batchmat_to_tileimg(batch_x, (height, width), (10, 10)))
    fig.savefig(FLAGS.save_dir + '/original.png')
    fig = plt.figure('reconstructed')
    plt.gray()
    plt.axis('off')
    p_recon = sess.run(p, {x: batch_x})
    plt.imshow(batchmat_to_tileimg(p_recon, (height, width), (10, 10)))
    fig.savefig(FLAGS.save_dir + '/reconstructed.png')
    batch_w = np.zeros((n_fac*n_fac, n_fac))
    for i in range(n_fac):
        batch_w[i*n_fac:(i+1)*n_fac, i] = 1.0
    batch_z = np.random.normal(size=(n_fac*n_fac, n_lat))
    p_gen = sess.run(p, {w: batch_w, z: batch_z})
    I_gen = batchmat_to_tileimg(p_gen, (height, width), (n_fac, n_fac))
    fig = plt.figure('generated')
    plt.gray()
    plt.axis('off')
    plt.imshow(I_gen)
    fig.savefig(FLAGS.save_dir + '/generated.png')
    """
    fig = plt.figure('factor activation heatmap')
    hist = np.zeros((10, n_fac))
    for i in range(len(test_x)):
        batch_x = test_x[i*batch_size:(i+1)*batch_size]
        batch_w = sess.run(w, {x: batch_x})
        for i in range(batch_size):
            hist[batch_y[i], batch_w[i] > 0] += 1
    sns.heatmap(hist)
    fig.savefig(FLAGS.save_dir + '/feature_activation.png')
    """
    plt.show()
def build_column_key(column, neurotransmitter, dt=None, heatmap=None):
    return "column_{0}_{1}_{2}_{3}".format(column, get_neurotransmitter_name(neurotransmitter), dt, heatmap)
def build_layer_key(index, neurotransmitter, dt=None, heatmap=None):
    return "layer_{0}_{1}_{2}_{3}".format(index, get_neurotransmitter_name(neurotransmitter), dt, heatmap)
def set_flag_to_column(column, neurotransmitter, heatmap=False, dt=1, multimeter=False):
    for neurotransmitter in (Glu, GABA) if neurotransmitter == both else (neurotransmitter,):
        key = build_column_key(column, neurotransmitter, dt, heatmap)
        spike_detectors[key] = dict()
        if multimeter:
            multimeters[key] = dict()
        for layer in range(len(Cortex)):
            neuron_number = len(Cortex[layer][column][neurotransmitter])
            if multimeter:
                multimeters[key][layer] = nest.Create('multimeter', params=multimeter_param)
                # Integer slice step: record every (neuron_number // N_volt)-th neuron.
                nest.Connect(multimeters[key][layer], Cortex[layer][column][neurotransmitter][::neuron_number // N_volt])
            spike_detectors[key][layer] = nest.Create('spike_detector', params=detector_param)
            nest.Connect(Cortex[layer][column][neurotransmitter][:N_detect], spike_detectors[key][layer])
def set_flag_to_layer(layer, neurotransmitter=Glu, heatmap=True, dt=1, multimeter=False):
    for neurotransmitter in (Glu, GABA) if neurotransmitter == both else (neurotransmitter,):
        key = build_layer_key(layer, neurotransmitter, dt, heatmap)
        spike_detectors[key] = dict()
        if multimeter:
            multimeters[key] = dict()
        for column in range(column_number):
            neuron_number = len(Cortex[layer][column][neurotransmitter])
            if multimeter:
                multimeters[key][column] = nest.Create('multimeter', params=multimeter_param)
                # Integer slice step: record every (neuron_number // N_volt)-th neuron.
                nest.Connect(multimeters[key][column], Cortex[layer][column][neurotransmitter][::neuron_number // N_volt])
            spike_detectors[key][column] = nest.Create('spike_detector', params=detector_param)
            nest.Connect(Cortex[layer][column][neurotransmitter][:N_detect], spike_detectors[key][column])
def save_layer_data(key, value, isMultimeter=False):
    """
    :param key: layer key built by build_layer_key()
    :param value: dict mapping column index to the recording device
    :param isMultimeter: True if the devices are multimeters, False for spike detectors
    :return:
    """
    # Get parameters from the key string
    params = str(key).split("_")
    area = params[0]
    layer_name = get_layer_name(int(params[1]))
    neurotransmitter = params[2]
    parent_dir = "{0}_{1}[{2}]".format(area, layer_name, neurotransmitter)
    if not os.path.exists(parent_dir):
        os.mkdir(parent_dir)
    if isMultimeter:
        address = create_subdir('voltage', parent_dir)
        for column, device in value.items():
            nest.voltage_trace.from_device(device, title="Membrane potential in {0} column {1}".format(layer_name, column))
            plt.savefig("{0}/{1}.{2}".format(address, column, image_format), dpi=dpi_n, format=image_format)
            plt.close()
    else:
        dt = int(params[3])
        # bool() of a non-empty string is always True, so compare to the literal.
        heatmap = params[4] == "True"
        address = create_subdir('spikes', parent_dir)
        for column, device in value.items():
            try:
                nest.raster_plot.from_device(device, hist=True, title="Spikes {0} column {1}".format(layer_name, column))
                plt.savefig("{0}/{1}.{2}".format(address, column, image_format), dpi=dpi_n, format=image_format)
                plt.close()
            except nest.NESTError:
                print("From column {0} {1}[{2}] activity was not found".format(column, layer_name, neurotransmitter))
        if heatmap:
            address = create_subdir('heatmap', parent_dir)
            heatmap_builder(address, value, dt, isColumn=False)
def show_heatmap(filename):
    """Show the confusion matrix of a partis-generated tab-delimited db."""
    true_labels, estimated_labels = get_clones_real_estimated(filename)
    cm, rows, cols = confusion_matrix(true_labels, estimated_labels)
    df = pd.DataFrame(cm, index=rows, columns=cols)
    sns.heatmap(df)
    plt.show()
def xarr_heatmap(fg, title=None, kwheat={}, fmt=('%.3f', '%.2f'), fig=None):
    '''Needs seaborn and xarray.'''
    fig = plt.figure() if fig is None else fig
    df = fg.to_pandas()
    # Format the indices
    df.index = [float(fmt[0] % x) for x in df.index]
    df.columns = [float(fmt[1] % x) for x in df.columns]
    import seaborn as sns
    ax = sns.heatmap(df, annot=True, **kwheat)
    ax.invert_yaxis()
    ax.set_title(title)
    ax.set_xlabel(fg.dims[1])
    ax.set_ylabel(fg.dims[0])
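# A minimal usage sketch for xarr_heatmap: a small 2-D xarray.DataArray whose
# dims and coords become the heatmap axes; the grid-search values are made up.
import numpy as np
import xarray as xr
import matplotlib.pyplot as plt

fg = xr.DataArray(np.random.rand(4, 5),
                  dims=('learning_rate', 'momentum'),
                  coords={'learning_rate': np.linspace(0.001, 0.1, 4),
                          'momentum': np.linspace(0.5, 0.9, 5)})
xarr_heatmap(fg, title='validation accuracy')
plt.show()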
def plot_filter_heatmap(weights, filename=None):
    param_range = abs(weights).max()
    fig, ax = plt.subplots(figsize=(weights.shape[1], weights.shape[0]))
    sns.heatmap(weights, cmap='RdYlBu_r', linewidths=0.2, vmin=-param_range,
                vmax=param_range, ax=ax)
    ax.set_xticklabels(range(1, weights.shape[1] + 1))
    labels = [ALPHABET_R[i] for i in reversed(range(weights.shape[0]))]
    ax.set_yticklabels(labels, rotation='horizontal', size=10)
    if filename:
        plt.savefig(filename)
        plt.close()
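# A hypothetical usage sketch for plot_filter_heatmap. ALPHABET_R is assumed to
# be the module-level index-to-letter mapping referenced above (a DNA alphabet
# stand-in here); the random filter weights are illustrative only.
import numpy as np

ALPHABET_R = {0: 'A', 1: 'C', 2: 'G', 3: 'T'}   # assumed mapping
filter_weights = np.random.randn(4, 8)          # (alphabet size, filter width)
plot_filter_heatmap(filter_weights, filename='filter_weights.png')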
def generate_confusion_matrix(y_test, y_pred, labels, title, filename, show=False):
    cm = confusion_matrix(y_test, y_pred, labels=labels)
    df_cm = pd.DataFrame(cm, index=labels, columns=labels)
    plt.figure(figsize=(12, 8))
    ax = sn.heatmap(df_cm, annot=True)
    plt.ylabel("Actual Label", fontsize=14, fontweight='bold')
    plt.xlabel("Predicted Label", fontsize=14, fontweight='bold')
    plt.title(title, fontsize=16, fontweight='bold')
    ttl = ax.title
    ttl.set_position([0.5, 1.03])
    plt.savefig(filename)
    if show:
        plt.show()
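# A minimal usage sketch for generate_confusion_matrix with toy labels; the
# class names and output path below are illustrative only.
y_test = ['spam', 'ham', 'ham', 'spam', 'ham', 'ham']
y_pred = ['spam', 'ham', 'spam', 'spam', 'ham', 'ham']
generate_confusion_matrix(y_test, y_pred,
                          labels=['spam', 'ham'],
                          title='Spam classifier confusion matrix',
                          filename='spam_confusion_matrix.png')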