def stabilize(self, prior_columns, percent):
    """
    Force-activate columns which were active on the prior time step so that
    at least `percent` of the active columns overlap between time steps.
    Always call this between compute and learn!

    Arguments:
        prior_columns -- SDR of the previously active columns.
        percent       -- target fraction of overlap, in [0, 1].
    """
    num_active = len(self.columns)
    overlap = self.columns.overlap(prior_columns)
    # How many currently-active columns already carry over from the prior
    # step, versus how many the caller wants to carry over.
    stable_columns = int(round(num_active * overlap))
    target_columns = int(round(num_active * percent))
    add_columns = target_columns - stable_columns
    if add_columns <= 0:
        return

    # Candidates: previously-active columns that are not currently active.
    eligible_columns = np.setdiff1d(prior_columns.flat_index, self.columns.flat_index)
    # Clamp: we cannot add more columns than there are candidates.  Without
    # this, argpartition raises when add_columns > len(eligible_columns)
    # (kth out of bounds), e.g. when the eligible set is small or empty.
    add_columns = min(add_columns, len(eligible_columns))
    if add_columns == 0:
        return

    # Pick the add_columns most-excited eligible columns.  argpartition is
    # O(n) and we do not need the selection sorted.
    # NOTE: attribute name `raw_excitment` [sic] is defined elsewhere.
    eligible_excite = self.raw_excitment[eligible_columns]
    top_slots = np.argpartition(-eligible_excite, add_columns - 1)[:add_columns]
    selected_columns = eligible_columns[top_slots]

    # NOTE(review): the previously-active segments of the re-activated
    # columns could also learn here (the currently most-excited segments
    # learn in SP.learn()); that path was left unimplemented.
    self.columns.flat_index = np.concatenate([self.columns.flat_index, selected_columns])