from copy import deepcopy
from functools import partial

import numpy as np
import theano

import lnn  # project module assumed to expose the update functions (lnn.updates)
import nn   # project module assumed to provide LearnRateSchedule


def create_optimiser(optimiser):
    """
    Create an optimiser function and an (optional) learning rate schedule
    from an optimiser configuration dict.
    """
    if optimiser['schedule'] is not None:
        # if we have a learning rate schedule, create a Theano shared
        # variable and a corresponding update
        lr = theano.shared(np.float32(optimiser['params']['learning_rate']))
        # work on a deep copy of the optimiser config dict so the
        # caller's version is not modified
        optimiser = deepcopy(optimiser)
        optimiser['params']['learning_rate'] = lr
        lrs = nn.LearnRateSchedule(learning_rate=lr, **optimiser['schedule'])
    else:
        lrs = None
    return partial(getattr(lnn.updates, optimiser['name']),
                   **optimiser['params']), lrs
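
# A minimal usage sketch. The config layout ('name', 'params', 'schedule')
# follows what create_optimiser reads above; the concrete optimiser name and
# the schedule keyword arguments below are assumptions, since they depend on
# what lnn.updates and nn.LearnRateSchedule actually accept.
optimiser_config = {
    'name': 'adam',                        # looked up via getattr(lnn.updates, ...)
    'params': {'learning_rate': 0.001},    # passed as keyword arguments
    'schedule': {'interval': 10, 'factor': 0.5},  # hypothetical schedule kwargs
}

opt_fn, lr_schedule = create_optimiser(optimiser_config)
# opt_fn now behaves like lnn.updates.adam with its parameters pre-bound,
# e.g. updates = opt_fn(loss, params) for a Theano loss and parameter list.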