def addInitialRNNLayer(mainGraph,
                       inputOperation=None,
                       activation=TanhActivation,
                       nHidden=100):
    """Add an RNN layer to input data.

    Parameters
    ----------
    mainGraph : ga.Graph
        computation graph to which the RNN layer is appended
    inputOperation : ga.Operation
        operation feeding the data to the layer, must have a shape of
        (nExamples, seriesLength, nFeatures); required despite the
        None default (a ValueError is raised if omitted)
    activation : ga.SingleInputOperation [class]
        activation operation for hidden units
    nHidden : int
        number of hidden units

    Returns
    -------
    list(ga.Operation)
        List of hidden-state operations from the RNN layer: the initial
        (zero) hidden state followed by one activation per time step,
        i.e. seriesLength + 1 entries in total.

    Raises
    ------
    ValueError
        If inputOperation is not provided.
    """
    if inputOperation is None:
        # The None default only allows keyword-style calls; the layer
        # cannot be constructed without an input operation, so fail
        # early with a clear message instead of an AttributeError below.
        raise ValueError("inputOperation must be provided")
    nExamples, seriesLength, nFeatures = inputOperation.shape

    # Initial hidden state h0: zeros, not a trainable parameter.
    h0 = generateZeroVariable(shape=(nExamples, nHidden),
                              transpose=False)
    # Trainable parameters shared across all time steps:
    #   W: input-to-hidden, U: hidden-to-hidden, B: bias.
    # NOTE(review): nInputs scales the random initialization; the
    # seriesLength ** 3 factor is unusual — confirm against the
    # contract of generateRandomVariable.
    W = generateRandomVariable(shape=(nFeatures, nHidden),
                               transpose=False,
                               nInputs=nFeatures * seriesLength ** 3)
    U = generateRandomVariable(shape=(nHidden, nHidden),
                               transpose=False,
                               nInputs=nHidden * seriesLength ** 3)
    B = generateRandomVariable(shape=(1, nHidden),
                               transpose=False,
                               nInputs=nHidden * seriesLength ** 3)
    h0op = mainGraph.addOperation(h0)
    Wop = mainGraph.addOperation(W, doGradient=True)
    Uop = mainGraph.addOperation(U, doGradient=True)
    Bop = mainGraph.addOperation(B, doGradient=True)

    hactivations = [h0op]
    # ------ append one activation gate per time step, feeding each
    # gate the slice of the input at that step and the previous hidden
    # state (weights W, U, B are shared across steps).
    for indexRNN in range(seriesLength):
        xSliceop = mainGraph.addOperation(
            SliceOperation(inputOperation,
                           np.index_exp[:, indexRNN, :]))
        newHActiv = createRNNgate(mainGraph,
                                  xSliceop,
                                  hactivations[-1],
                                  Wop,
                                  Uop,
                                  Bop,
                                  activation)
        hactivations.append(newHActiv)
    return hactivations
Source file: recurrentNeuralNetwork.py
Language: python
Views: 21
Bookmarks: 0
Likes: 0
Comments: 0
Comment list
Table of contents