def __init__(self, output_dim, inner_dim, depth=2, init_output='uniform',
             activation_output='softmax', init_inner='identity',
             activation_inner='linear', scale_output=0.01, padding=False, **kwargs):
    """Build a GraphFP (graph fingerprint) embedding layer.

    Parameters
    ----------
    output_dim : int
        Size of the fingerprint vector produced by the layer.
    inner_dim : int
        Size of the inner (hidden) atom representation.
    depth : int
        Number of message-passing iterations; must be >= 1.
    init_output, init_inner : str
        Names of Keras weight initializations for the output and inner
        transforms (resolved via ``initializations.get``).
    activation_output, activation_inner : str
        Names of Keras activations for the output and inner transforms
        (resolved via ``activations.get``).
    scale_output : float
        Scaling factor applied to the output weights.
    padding : bool
        Whether inputs are zero-padded to a fixed number of atoms.
    **kwargs
        Forwarded to the Keras ``Layer`` base constructor.

    Raises
    ------
    ValueError
        If ``depth`` is less than 1.
    """
    if depth < 1:
        # Raise a proper exception instead of quit(): quit() comes from the
        # `site` module (absent under `python -S`) and kills the interpreter,
        # which is wrong for library code.
        raise ValueError('Cannot use GraphFP with depth less than one')
    self.init_output = initializations.get(init_output)
    self.activation_output = activations.get(activation_output)
    self.init_inner = initializations.get(init_inner)
    self.activation_inner = activations.get(activation_inner)
    self.output_dim = output_dim
    self.inner_dim = inner_dim
    self.depth = depth
    self.scale_output = scale_output
    self.padding = padding
    self.initial_weights = None
    # Each input entry is a 3D N_atom x N_atom x N_feature tensor, so the
    # batched input is 4D. input_dim is fixed at 4, making the guard below
    # always true; kept as-is to preserve the original control flow.
    self.input_dim = 4
    if self.input_dim:
        kwargs['input_shape'] = (None, None, None,)  # 3D tensor per sample
    # self.input = K.placeholder(ndim=4)
    super(GraphFP, self).__init__(**kwargs)
# Provenance note: this snippet was extracted from GraphEmbedding_sumAfter.py
# (web-scraped page footer with view/like/comment counters removed).