def broadcast(self, input_, norm, classifier=None, aif_scale=1):
    """
    High-level broadcast: process the input pattern in bulk instead of per-component (per byte).

    input_: sequence of components; len 1-256 for faithful CM1K emulation (otherwise unlimited)
    norm: a DistNorm enum selecting the distance metric
    classifier: a Classifier enum; RBF gates firing on the AIF, KNN always fires
    aif_scale: transiently scales the AIF used for the firing test. The AIF can also be
        permanently scaled via CM1KEmulator.scale_all_aifs(); this parameter achieves the
        same effect without altering the neuron.
    """
    # Called from CM1KEmulator.broadcast()
    log.trace("Neuron.broadcast()")

    # Guard: only committed neurons and the ready-to-learn neuron should be broadcast to.
    if self.state == NeuronState.idle:
        log.error("Neuron.broadcast() called on idle neuron")
        return

    # Recompute the distance from scratch for this broadcast.
    # NOTE: Possibly redundant; could equivalently be expressed via reset_dist().
    self.dist = 0
    pattern = self.pattern
    if norm == cm1k.CM1KDistNorm.l1:
        # Manhattan distance: sum of absolute component differences.
        self.dist = sum(abs(comp - pattern[idx]) for idx, comp in enumerate(input_))
    elif norm == cm1k.CM1KDistNorm.lsup:
        # L-sup distance: largest single component difference.
        for idx, comp in enumerate(input_):
            delta = abs(comp - pattern[idx])
            if delta > self.dist:
                self.dist = delta
    elif norm == cm1k.CM1KDistNorm.euc:
        # Euclidean distance, rounded to the nearest integer.
        diffs = (comp - pattern[idx] for idx, comp in enumerate(input_))
        self.dist = sum(delta * delta for delta in diffs)
        self.dist = int(round(math.sqrt(self.dist)))

    log.log("Single neuron cat{} dist: {:>5} < {:>5} ?".format(self.cat, self.dist, self.aif))

    # TODO: Use the minimum and maximum AIFs of each neuron (i.e., of each context)
    if aif_scale == 1:
        aif = self.aif
    else:
        # Scale the AIF for this broadcast only, clamped to the chip's 16-bit range.
        aif = min(max(int(round(self.aif * aif_scale)), 0), 0xFFFF)

    # RBF fires only when within the (possibly scaled) influence field; KNN always fires.
    fired = classifier == cm1k.CM1KClassifier.knn or (
        classifier == cm1k.CM1KClassifier.rbf and self.dist < aif
    )
    if fired:
        log.log("Fire with dist{} aif{} cat{}".format(self.dist, aif, self.cat))
        self.chip.store_firing_neuron(self)