Commit 278d2770 authored by Anthony Larcher

conflict

parents 1e3cda4b 2ad4e176
@@ -103,8 +103,8 @@ class Xtractor(torch.nn.Module):
         seg_emb_2 = self.dropout_lin1(seg_emb_1)
         seg_emb_3 = self.norm6(self.activation(self.seg_lin1(seg_emb_2)))
         # No batch-normalisation after this layer
         # seg_emb_3 = self.activation(self.seg_lin2(seg_emb_2))
-        seg_emb_4 = self.seg_lin2(seg_emb_3)
+        seg_emb_4 = self.activation(self.seg_lin2(seg_emb_3))
         #seg_emb_3 = self.seg_lin2(seg_emb_2)
         return seg_emb_4
     def LossFN(self, x, lable):
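For readers skimming the hunk, below is a minimal stand-alone sketch of the segment-level head it touches. Only the layer names and their call order come from the diff; the layer sizes, dropout probability, and the choice of LeakyReLU are assumptions for illustration. The merged line passes the output of the final linear layer seg_lin2 through the activation instead of returning the raw linear output.

```python
import torch

class SegmentHead(torch.nn.Module):
    """Hypothetical extraction of the segment-level layers shown in the hunk.

    Dimensions (512 -> 512), the dropout probability and the LeakyReLU slope
    are assumptions; only the layer names and call order come from the diff.
    """
    def __init__(self, in_dim=512, emb_dim=512):
        super().__init__()
        self.seg_lin1 = torch.nn.Linear(in_dim, emb_dim)
        self.norm6 = torch.nn.BatchNorm1d(emb_dim)
        self.dropout_lin1 = torch.nn.Dropout(p=0.25)
        self.seg_lin2 = torch.nn.Linear(emb_dim, emb_dim)
        self.activation = torch.nn.LeakyReLU(0.2)

    def forward(self, seg_emb_1):
        seg_emb_2 = self.dropout_lin1(seg_emb_1)
        seg_emb_3 = self.norm6(self.activation(self.seg_lin1(seg_emb_2)))
        # Before the merge the last layer returned the raw linear output:
        #   seg_emb_4 = self.seg_lin2(seg_emb_3)
        # After the merge it is also passed through the activation
        # (still with no batch-normalisation on this layer):
        seg_emb_4 = self.activation(self.seg_lin2(seg_emb_3))
        return seg_emb_4


# Quick check on a dummy batch of pooled statistics.
head = SegmentHead()
emb = head(torch.randn(4, 512))
print(emb.shape)  # torch.Size([4, 512])
```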