model.py source code

Language: Python

Project: merlin    Author: CSTR-Edinburgh
import tensorflow as tf
from tensorflow.contrib.layers import batch_norm, dropout, fully_connected

def define_feedforward_model(self):
    layer_list = []
    with self.graph.as_default() as g:
        # Run-time switch between training and inference mode for batch norm.
        is_training_batch = tf.placeholder(tf.bool, shape=(), name="is_training_batch")
        bn_params = {"is_training": is_training_batch, "decay": 0.99, "updates_collections": None}
        g.add_to_collection("is_training_batch", is_training_batch)

        with tf.name_scope("input"):
            input_layer = tf.placeholder(dtype=tf.float32, shape=(None, self.n_in), name="input_layer")
            if self.dropout_rate != 0.0:
                print("Using dropout to avoid overfitting; the dropout rate is", self.dropout_rate)
                # Separate switch so dropout can be disabled at inference time.
                is_training_drop = tf.placeholder(dtype=tf.bool, shape=(), name="is_training_drop")
                input_layer_drop = dropout(input_layer, self.dropout_rate, is_training=is_training_drop)
                layer_list.append(input_layer_drop)
                g.add_to_collection(name="is_training_drop", value=is_training_drop)
            else:
                layer_list.append(input_layer)
        g.add_to_collection("input_layer", layer_list[0])

        # Stack fully connected hidden layers, each batch-normalized and,
        # if requested, followed by dropout. Only "tanh" and "sigmoid"
        # layer types are handled here.
        for i in range(len(self.hidden_layer_size)):
            with tf.name_scope("hidden_layer_" + str(i + 1)):
                last_layer = layer_list[-1]
                if self.hidden_layer_type[i] == "tanh":
                    new_layer = fully_connected(last_layer, self.hidden_layer_size[i],
                                                activation_fn=tf.nn.tanh,
                                                normalizer_fn=batch_norm,
                                                normalizer_params=bn_params)
                elif self.hidden_layer_type[i] == "sigmoid":
                    new_layer = fully_connected(last_layer, self.hidden_layer_size[i],
                                                activation_fn=tf.nn.sigmoid,
                                                normalizer_fn=batch_norm,
                                                normalizer_params=bn_params)
                if self.dropout_rate != 0.0:
                    new_layer = dropout(new_layer, self.dropout_rate, is_training=is_training_drop)
                layer_list.append(new_layer)

        with tf.name_scope("output_layer"):
            if self.output_type == "linear":
                output_layer = fully_connected(layer_list[-1], self.n_out, activation_fn=None)
            elif self.output_type == "tanh":
                output_layer = fully_connected(layer_list[-1], self.n_out, activation_fn=tf.nn.tanh)
            g.add_to_collection(name="output_layer", value=output_layer)

        with tf.name_scope("training_op"):
            if self.optimizer == "adam":
                self.training_op = tf.train.AdamOptimizer()
                # The listing breaks off here in the original page.
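
The snippet ends before the training op is actually built: as written, self.training_op holds a bare AdamOptimizer instance rather than an op to run. A minimal, hypothetical continuation under TF 1.x might add a target placeholder and a mean-squared-error loss and minimize it; target_layer and loss below are illustrative names and assumptions, not taken from the original file.

    # Hypothetical continuation (not from the original file): a minimal
    # TF 1.x training op, assuming an MSE regression loss.
    with tf.name_scope("training_op"):
        target_layer = tf.placeholder(dtype=tf.float32, shape=(None, self.n_out), name="target_layer")
        loss = tf.reduce_mean(tf.square(output_layer - target_layer))
        if self.optimizer == "adam":
            self.training_op = tf.train.AdamOptimizer().minimize(loss)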
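Because the input, output, and mode placeholders are registered in graph collections, a separate script can drive inference without keeping Python references to the tensors. A sketch, assuming graph is the tf.Graph built by define_feedforward_model and features is a (batch, n_in) array; restoring trained variables from a checkpoint is elided.

# Sketch: recover the registered tensors and run a forward pass with
# batch norm and dropout switched to inference mode.
input_layer = graph.get_collection("input_layer")[0]
output_layer = graph.get_collection("output_layer")[0]
is_training_batch = graph.get_collection("is_training_batch")[0]

feed_dict = {input_layer: features, is_training_batch: False}
# Only registered when the model was built with a non-zero dropout rate.
drop_flags = graph.get_collection("is_training_drop")
if drop_flags:
    feed_dict[drop_flags[0]] = False

with tf.Session(graph=graph) as sess:
    sess.run(tf.global_variables_initializer())  # or restore a checkpoint here
    predictions = sess.run(output_layer, feed_dict=feed_dict)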