from collections import OrderedDict
import numpy as np
from theano import config

def init_params(options):
    params = OrderedDict()
    np.random.seed(0)
    inputDimSize = options['inputDimSize']
    numAncestors = options['numAncestors']
    embDimSize = options['embDimSize']
    hiddenDimSize = options['hiddenDimSize'] #hidden layer does not need an extra space
    attentionDimSize = options['attentionDimSize']
    numClass = options['numClass']

    # Embedding matrix covers both the leaf codes and their ancestors.
    params['W_emb'] = get_random_weight(inputDimSize+numAncestors, embDimSize)
    if len(options['embFile']) > 0:
        # A pre-trained embedding file overrides the random initialization,
        # so the embedding dimension must follow the loaded matrix.
        params['W_emb'] = load_embedding(options)
        options['embDimSize'] = params['W_emb'].shape[1]
        embDimSize = options['embDimSize']

    # Attention MLP: scores a concatenated (leaf, ancestor) embedding pair.
    params['W_attention'] = get_random_weight(embDimSize*2, attentionDimSize)
    params['b_attention'] = np.zeros(attentionDimSize).astype(config.floatX)
    params['v_attention'] = np.random.uniform(-0.1, 0.1, attentionDimSize).astype(config.floatX)

    # GRU weights: the reset, update, and candidate gates are concatenated,
    # hence the 3*hiddenDimSize second dimension.
    params['W_gru'] = get_random_weight(embDimSize, 3*hiddenDimSize)
    params['U_gru'] = get_random_weight(hiddenDimSize, 3*hiddenDimSize)
    params['b_gru'] = np.zeros(3*hiddenDimSize).astype(config.floatX)

    # Softmax output layer.
    params['W_output'] = get_random_weight(hiddenDimSize, numClass)
    params['b_output'] = np.zeros(numClass).astype(config.floatX)

    return params
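The function above depends on two helpers that are not shown in this section: get_random_weight and load_embedding. Below is a minimal sketch of what they might look like, inferred only from how they are called (shapes, dtype, and the embFile option); in particular, the use of np.load for the embedding file is an assumption, not the original code.

    import numpy as np
    from theano import config

    def get_random_weight(dim1, dim2, left=-0.1, right=0.1):
        # Uniform random matrix of shape (dim1, dim2) in Theano's float type,
        # matching how init_params uses it for all weight matrices.
        return np.random.uniform(left, right, (dim1, dim2)).astype(config.floatX)

    def load_embedding(options):
        # Assumed convention: embFile points to a saved NumPy array whose
        # rows are pre-trained embeddings; init_params reads its shape[1]
        # to recover the embedding dimension.
        m = np.load(options['embFile'])
        return np.array(m).astype(config.floatX)

With those in place, init_params only needs an options dict. A hypothetical example with placeholder sizes:

    options = {
        'inputDimSize': 100,      # number of leaf codes (example value)
        'numAncestors': 20,       # number of ancestor nodes (example value)
        'embDimSize': 128,
        'hiddenDimSize': 128,
        'attentionDimSize': 128,
        'numClass': 10,
        'embFile': '',            # empty string -> random initialization
    }
    params = init_params(options)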