Commit 2ad4e176 authored by Anthony Larcher's avatar Anthony Larcher
Browse files

add activation at the end

parent fa14d338
......@@ -99,8 +99,8 @@ class Xtractor(torch.nn.Module):
# new layer with batch Normalization
seg_emb_2 = self.norm6(self.activation(self.seg_lin1(seg_emb_1)))
# No batch-normalisation after this layer
# seg_emb_3 = self.activation(self.seg_lin2(seg_emb_2))
seg_emb_3 = self.seg_lin2(seg_emb_2)
seg_emb_3 = self.activation(self.seg_lin2(seg_emb_2))
#seg_emb_3 = self.seg_lin2(seg_emb_2)
return seg_emb_3
def LossFN(self, x, lable):
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment